Fix expunge transaction boundaries

James Agnew 2018-06-25 10:22:49 -04:00
parent e5cb609f4d
commit f0da7a33de
7 changed files with 217 additions and 160 deletions
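
The pattern this commit applies, visible in the hunks below: the expunge entry points are annotated @Transactional(propagation = Propagation.NEVER) so no single ambient transaction spans the whole operation, and doExpunge(...) then pushes each lookup and each per-resource deletion through its own TransactionTemplate. The following is a minimal, self-contained sketch of that pattern only; the class name, constructor injection, and the deleteAllRowsForResource helper are illustrative, not the actual HAPI FHIR API.

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.List;

public class ExpungeServiceSketch {

	private final PlatformTransactionManager myPlatformTransactionManager;

	public ExpungeServiceSketch(PlatformTransactionManager thePlatformTransactionManager) {
		myPlatformTransactionManager = thePlatformTransactionManager;
	}

	// NEVER: fail fast if a caller already opened a transaction, so the loop below
	// really does commit each resource in its own small transaction.
	@Transactional(propagation = Propagation.NEVER)
	public void expunge(List<Long> theResourceIds) {
		TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
		for (Long next : theResourceIds) {
			// Each iteration is one self-contained transaction: either every row
			// belonging to this resource is deleted, or none of them are.
			txTemplate.execute(t -> {
				deleteAllRowsForResource(next); // hypothetical helper
				return null;
			});
		}
	}

	private void deleteAllRowsForResource(Long theResourceId) {
		// Placeholder for the history, index, forced-ID and resource-table deletes
		// that the real implementation performs.
	}
}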

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.dao;
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -58,7 +58,6 @@ import ca.uhn.fhir.util.*;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Charsets;
 import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.Lists;
 import com.google.common.collect.Sets;
 import com.google.common.hash.HashFunction;
 import com.google.common.hash.Hashing;
@@ -227,6 +226,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 	}

 	protected ExpungeOutcome doExpunge(String theResourceName, Long theResourceId, Long theVersion, ExpungeOptions theExpungeOptions) {
+		TransactionTemplate txTemplate = new TransactionTemplate(myPlatformTransactionManager);
 		if (!getConfig().isExpungeEnabled()) {
 			throw new MethodNotAllowedException("$expunge is not enabled on this server");
@@ -245,32 +245,39 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		/*
 		 * Delete historical versions of deleted resources
 		 */
-		Pageable page = new PageRequest(0, remainingCount.get());
-		Slice<Long> resourceIds;
-		if (theResourceId != null) {
-			resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
-		} else {
-			if (theResourceName != null) {
-				resourceIds = myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
-			} else {
-				resourceIds = myResourceTableDao.findIdsOfDeletedResources(page);
-			}
-		}
+		Pageable page = PageRequest.of(0, remainingCount.get());
+		Slice<Long> resourceIds = txTemplate.execute(t -> {
+			if (theResourceId != null) {
+				return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceId, theResourceName);
+			} else {
+				if (theResourceName != null) {
+					return myResourceTableDao.findIdsOfDeletedResourcesOfType(page, theResourceName);
+				} else {
+					return myResourceTableDao.findIdsOfDeletedResources(page);
+				}
+			}
+		});
 		for (Long next : resourceIds) {
-			expungeHistoricalVersionsOfId(next, remainingCount);
-			if (remainingCount.get() <= 0) {
-				return toExpungeOutcome(theExpungeOptions, remainingCount);
-			}
+			txTemplate.execute(t -> {
+				expungeHistoricalVersionsOfId(next, remainingCount);
+				if (remainingCount.get() <= 0) {
+					return toExpungeOutcome(theExpungeOptions, remainingCount);
+				}
+				return null;
+			});
 		}

 		/*
 		 * Delete current versions of deleted resources
 		 */
 		for (Long next : resourceIds) {
-			expungeCurrentVersionOfResource(next);
-			if (remainingCount.get() <= 0) {
-				return toExpungeOutcome(theExpungeOptions, remainingCount);
-			}
+			txTemplate.execute(t -> {
+				expungeCurrentVersionOfResource(next);
+				if (remainingCount.get() <= 0) {
+					return toExpungeOutcome(theExpungeOptions, remainingCount);
+				}
+				return null;
+			});
 		}
 	}
@@ -280,22 +287,26 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		/*
 		 * Delete historical versions of non-deleted resources
 		 */
-		Pageable page = new PageRequest(0, remainingCount.get());
-		Slice<Long> historicalIds;
-		if (theResourceId != null && theVersion != null) {
-			historicalIds = toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
-		} else {
-			if (theResourceName != null) {
-				historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
-			} else {
-				historicalIds = myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
-			}
-		}
+		Pageable page = PageRequest.of(0, remainingCount.get());
+		Slice<Long> historicalIds = txTemplate.execute(t -> {
+			if (theResourceId != null && theVersion != null) {
+				return toSlice(myResourceHistoryTableDao.findForIdAndVersion(theResourceId, theVersion));
+			} else {
+				if (theResourceName != null) {
+					return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page, theResourceName);
+				} else {
+					return myResourceHistoryTableDao.findIdsOfPreviousVersionsOfResources(page);
+				}
+			}
+		});
 		for (Long next : historicalIds) {
-			expungeHistoricalVersion(next);
-			if (remainingCount.decrementAndGet() <= 0) {
-				return toExpungeOutcome(theExpungeOptions, remainingCount);
-			}
+			txTemplate.execute(t -> {
+				expungeHistoricalVersion(next);
+				if (remainingCount.decrementAndGet() <= 0) {
+					return toExpungeOutcome(theExpungeOptions, remainingCount);
+				}
+				return null;
+			});
 		}
 	}
@@ -704,58 +715,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		return retVal;
 	}

-	@SuppressWarnings("unchecked")
-	public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
-		Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
-		IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
-		return dao;
-	}
-
-	protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
-		IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
-		if (retVal == null) {
-			List<String> supportedResourceTypes = getDaos()
-				.keySet()
-				.stream()
-				.map(t->myContext.getResourceDefinition(t).getName())
-				.sorted()
-				.collect(Collectors.toList());
-			throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
-		}
-		return retVal;
-	}
-
-	private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
-		if (myResourceTypeToDao == null) {
-			Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
-			Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
-			String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
-			for (IFhirResourceDao<?> next : daos.values()) {
-				resourceTypeToDao.put(next.getResourceType(), next);
-			}
-			if (this instanceof IFhirResourceDao<?>) {
-				IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
-				resourceTypeToDao.put(thiz.getResourceType(), thiz);
-			}
-			myResourceTypeToDao = resourceTypeToDao;
-		}
-		return Collections.unmodifiableMap(myResourceTypeToDao);
-	}
-
-	@PostConstruct
-	public void startClearCaches() {
-		myResourceTypeToDao = null;
-	}
-
 	protected Set<ResourceIndexedSearchParamCoords> extractSearchParamCoords(ResourceTable theEntity, IBaseResource theResource) {
 		return mySearchParamExtractor.extractSearchParamCoords(theEntity, theResource);
 	}
@@ -958,18 +917,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		return myConfig;
 	}

-	@Override
-	public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
-		/*
-		 * We do a null check here because Smile's module system tries to
-		 * initialize the application context twice if two modules depend on
-		 * the persistence module. The second time sets the dependency's appctx.
-		 */
-		if (myApplicationContext == null) {
-			myApplicationContext = theApplicationContext;
-		}
-	}
-
 	public void setConfig(DaoConfig theConfig) {
 		myConfig = theConfig;
 	}
@@ -996,6 +943,50 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		}
 	}

+	@SuppressWarnings("unchecked")
+	public <R extends IBaseResource> IFhirResourceDao<R> getDao(Class<R> theType) {
+		Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = getDaos();
+		IFhirResourceDao<R> dao = (IFhirResourceDao<R>) resourceTypeToDao.get(theType);
+		return dao;
+	}
+
+	protected IFhirResourceDao<?> getDaoOrThrowException(Class<? extends IBaseResource> theClass) {
+		IFhirResourceDao<? extends IBaseResource> retVal = getDao(theClass);
+		if (retVal == null) {
+			List<String> supportedResourceTypes = getDaos()
+				.keySet()
+				.stream()
+				.map(t -> myContext.getResourceDefinition(t).getName())
+				.sorted()
+				.collect(Collectors.toList());
+			throw new InvalidRequestException("Unable to process request, this server does not know how to handle resources of type " + getContext().getResourceDefinition(theClass).getName() + " - Can handle: " + supportedResourceTypes);
+		}
+		return retVal;
+	}
+
+	private Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> getDaos() {
+		if (myResourceTypeToDao == null) {
+			Map<Class<? extends IBaseResource>, IFhirResourceDao<?>> resourceTypeToDao = new HashMap<>();
+			Map<String, IFhirResourceDao> daos = myApplicationContext.getBeansOfType(IFhirResourceDao.class, false, false);
+			String[] beanNames = myApplicationContext.getBeanNamesForType(IFhirResourceDao.class);
+			for (IFhirResourceDao<?> next : daos.values()) {
+				resourceTypeToDao.put(next.getResourceType(), next);
+			}
+			if (this instanceof IFhirResourceDao<?>) {
+				IFhirResourceDao<?> thiz = (IFhirResourceDao<?>) this;
+				resourceTypeToDao.put(thiz.getResourceType(), thiz);
+			}
+			myResourceTypeToDao = resourceTypeToDao;
+		}
+		return Collections.unmodifiableMap(myResourceTypeToDao);
+	}
+
 	public IResourceIndexedCompositeStringUniqueDao getResourceIndexedCompositeStringUniqueDao() {
 		return myResourceIndexedCompositeStringUniqueDao;
 	}
@@ -1537,6 +1528,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		return retVal;
 	}

+	@Override
+	public void setApplicationContext(ApplicationContext theApplicationContext) throws BeansException {
+		/*
+		 * We do a null check here because Smile's module system tries to
+		 * initialize the application context twice if two modules depend on
+		 * the persistence module. The second time sets the dependency's appctx.
+		 */
+		if (myApplicationContext == null) {
+			myApplicationContext = theApplicationContext;
+		}
+	}
+
 	private void setUpdatedTime(Collection<? extends BaseResourceIndexedSearchParam> theParams, Date theUpdateTime) {
 		for (BaseResourceIndexedSearchParam nextSearchParam : theParams) {
 			nextSearchParam.setUpdated(theUpdateTime);
@@ -1593,6 +1596,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		return false;
 	}

+	@PostConstruct
+	public void startClearCaches() {
+		myResourceTypeToDao = null;
+	}
+
 	private ExpungeOutcome toExpungeOutcome(ExpungeOptions theExpungeOptions, AtomicInteger theRemainingCount) {
 		return new ExpungeOutcome()
 			.setDeletedCount(theExpungeOptions.getLimit() - theRemainingCount.get());
@@ -1648,13 +1656,13 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 		// get preload the tagList
 		Collection<? extends BaseTag> myTagList;
 		if (theTagList == null)
 			myTagList = theEntity.getTags();
 		else
 			myTagList = theTagList;

 		/*
 		 * Use the appropriate custom type if one is specified in the context
 		 */
@@ -1746,7 +1754,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> implements IDao,
 			return theResourceType + '/' + theId.toString();
 		}
 	}

 	@SuppressWarnings("unchecked")
 	protected ResourceTable updateEntity(RequestDetails theRequest, final IBaseResource theResource, ResourceTable
 		theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,

@@ -50,6 +50,7 @@ import ca.uhn.fhir.rest.server.method.SearchMethodBinding;
 import ca.uhn.fhir.util.*;
 import org.apache.commons.lang3.Validate;
 import org.hl7.fhir.instance.model.api.*;
+import org.hl7.fhir.r4.model.InstantType;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Required;
 import org.springframework.lang.NonNull;
@@ -517,6 +518,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}

 	@Override
+	@Transactional(propagation = Propagation.NEVER)
 	public ExpungeOutcome expunge(IIdType theId, ExpungeOptions theExpungeOptions) {
 		BaseHasResource entity = readEntity(theId);
 		if (theId.hasVersionIdPart()) {
@@ -532,6 +534,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}

 	@Override
+	@Transactional(propagation = Propagation.NEVER)
 	public ExpungeOutcome expunge(ExpungeOptions theExpungeOptions) {
 		ourLog.info("Beginning TYPE[{}] expunge operation", getResourceName());
@@ -856,14 +859,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		T retVal = toResource(myResourceType, entity, null, null, false);

-		IPrimitiveType<Date> deleted;
-		if (retVal instanceof IResource) {
-			deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IResource) retVal);
-		} else {
-			deleted = ResourceMetadataKeyEnum.DELETED_AT.get((IAnyResource) retVal);
-		}
-		if (deleted != null && !deleted.isEmpty()) {
-			throw new ResourceGoneException("Resource was deleted at " + deleted.getValueAsString());
+		if (entity.getDeleted() != null) {
+			throw new ResourceGoneException("Resource was deleted at " + new InstantType(entity.getDeleted()).getValueAsString());
 		}

 		ourLog.debug("Processed read on {} in {}ms", theId.getValue(), w.getMillisAndRestart());

@@ -9,9 +9,9 @@ package ca.uhn.fhir.jpa.entity;
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -20,27 +20,17 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */

-import javax.persistence.Column;
-import javax.persistence.Entity;
-import javax.persistence.ForeignKey;
-import javax.persistence.GeneratedValue;
-import javax.persistence.GenerationType;
-import javax.persistence.Id;
-import javax.persistence.Index;
-import javax.persistence.JoinColumn;
-import javax.persistence.OneToOne;
-import javax.persistence.SequenceGenerator;
-import javax.persistence.Table;
-import javax.persistence.UniqueConstraint;
 import org.hibernate.annotations.ColumnDefault;
+import javax.persistence.*;

 //@formatter:off
 @Entity()
 @Table(name = "HFJ_FORCED_ID", uniqueConstraints = {
 	@UniqueConstraint(name = "IDX_FORCEDID_RESID", columnNames = {"RESOURCE_PID"}),
-	@UniqueConstraint(name = "IDX_FORCEDID_TYPE_RESID", columnNames = {"RESOURCE_TYPE", "RESOURCE_PID"})
-}, indexes= {
+	@UniqueConstraint(name = "IDX_FORCEDID_TYPE_RESID", columnNames = {"RESOURCE_TYPE", "RESOURCE_PID"}),
+	@UniqueConstraint(name = "IDX_FORCEDID_TYPE_FID", columnNames = {"RESOURCE_TYPE", "FORCED_ID"})
+}, indexes = {
 	@Index(name = "IDX_FORCEDID_TYPE_FORCEDID", columnList = "RESOURCE_TYPE,FORCED_ID"),
 })
 //@formatter:on
@@ -57,11 +47,11 @@ public class ForcedId {
 	@Column(name = "PID")
 	private Long myId;

-	@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey=@ForeignKey(name="FK_FORCEDID_RESOURCE"))
+	@JoinColumn(name = "RESOURCE_PID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_FORCEDID_RESOURCE"))
 	@OneToOne()
 	private ResourceTable myResource;

-	@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable=false)
+	@Column(name = "RESOURCE_PID", nullable = false, updatable = false, insertable = false)
 	private Long myResourcePid;

 	// This is updatable=true because it was added in 1.6 and needs to be set.. At some
@@ -81,39 +71,39 @@ public class ForcedId {
 		return myForcedId;
 	}

-	public ResourceTable getResource() {
-		return myResource;
-	}
-
-	public Long getResourcePid() {
-		if (myResourcePid==null) {
-			return myResource.getId();
-		}
-		return myResourcePid;
-	}
-
-	public String getResourceType() {
-		return myResourceType;
-	}
-
 	public void setForcedId(String theForcedId) {
 		myForcedId = theForcedId;
 	}

+	public ResourceTable getResource() {
+		return myResource;
+	}
+
 	public void setResource(ResourceTable theResource) {
 		myResource = theResource;
 	}

-	public void setResourcePid(Long theResourcePid) {
-		myResourcePid = theResourcePid;
+	public Long getResourcePid() {
+		if (myResourcePid == null) {
+			return myResource.getId();
+		}
+		return myResourcePid;
 	}

 	public void setResourcePid(ResourceTable theResourcePid) {
 		myResource = theResourcePid;
 	}

+	public String getResourceType() {
+		return myResourceType;
+	}
+
 	public void setResourceType(String theResourceType) {
 		myResourceType = theResourceType;
 	}

+	public void setResourcePid(Long theResourcePid) {
+		myResourcePid = theResourcePid;
+	}
+
 }

@@ -170,6 +170,12 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	@Qualifier("myPatientDaoR4")
 	protected IFhirResourceDaoPatient<Patient> myPatientDao;
 	@Autowired
+	protected IResourceTableDao myResourceTableDao;
+	@Autowired
+	protected IResourceHistoryTableDao myResourceHistoryTableDao;
+	@Autowired
+	protected IForcedIdDao myForcedIdDao;
+	@Autowired
 	@Qualifier("myCoverageDaoR4")
 	protected IFhirResourceDao<Coverage> myCoverageDao;
 	@Autowired
@@ -188,10 +194,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	@Qualifier("myResourceProvidersR4")
 	protected Object myResourceProviders;
 	@Autowired
-	protected IResourceTableDao myResourceTableDao;
-	@Autowired
-	protected IResourceHistoryTableDao myResourceHistoryTableDao;
-	@Autowired
 	protected IResourceTagDao myResourceTagDao;
 	@Autowired
 	protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
@@ -284,7 +286,7 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 	@Before
 	public void beforeFlushFT() {
-		runInTransaction(()->{
+		runInTransaction(() -> {
 			FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
 			ftem.purgeAll(ResourceTable.class);
 			ftem.purgeAll(ResourceIndexedSearchParamString.class);
@@ -315,6 +317,11 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 		return myFhirCtx;
 	}

+	@Override
+	protected PlatformTransactionManager getTxManager() {
+		return myTxManager;
+	}
+
 	protected <T extends IBaseResource> T loadResourceFromClasspath(Class<T> type, String resourceName) throws IOException {
 		InputStream stream = FhirResourceDaoDstu2SearchNoFtTest.class.getResourceAsStream(resourceName);
 		if (stream == null) {
@@ -325,11 +332,6 @@ public abstract class BaseJpaR4Test extends BaseJpaTest {
 		return newJsonParser.parseResource(type, string);
 	}

-	@Override
-	protected PlatformTransactionManager getTxManager() {
-		return myTxManager;
-	}
-
 	@AfterClass
 	public static void afterClassClearContextBaseJpaR4Test() throws Exception {
 		ourValueSetDao.purgeCaches();

@@ -2,19 +2,27 @@ package ca.uhn.fhir.jpa.provider.r4;

 import ca.uhn.fhir.jpa.dao.DaoConfig;
 import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
+import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
+import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.util.ExpungeOptions;
 import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.util.TestUtil;
+import org.hamcrest.Matchers;
 import org.hl7.fhir.instance.model.api.IIdType;
+import org.hl7.fhir.r4.model.IdType;
 import org.hl7.fhir.r4.model.Observation;
 import org.hl7.fhir.r4.model.Patient;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.Test;
+import org.springframework.beans.factory.annotation.Autowired;

+import static org.hamcrest.Matchers.empty;
+import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.*;

 public class ExpungeR4Test extends BaseResourceProviderR4Test {
@@ -58,11 +66,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 		getDao(theId).read(theId);
 	}

-	@Override
-	@Before
-	public void before() throws Exception {
-		super.before();
+	public void createStandardPatients() {
 		Patient p = new Patient();
 		p.setId("PT-ONEVERSION");
 		p.getMeta().addTag().setSystem("http://foo").setCode("bar");
@@ -105,7 +110,6 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 		o.setStatus(Observation.ObservationStatus.FINAL);
 		myDeletedObservationId = myObservationDao.create(o).getId();
 		myDeletedObservationId = myObservationDao.delete(myDeletedObservationId).getId();
 	}

 	private IFhirResourceDao<?> getDao(IIdType theId) {
@@ -126,6 +130,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 	@Test
 	public void testExpungeInstanceOldVersionsAndDeleted() {
+		createStandardPatients();
+
 		Patient p = new Patient();
 		p.setId("PT-TWOVERSION");
 		p.getMeta().addTag().setSystem("http://foo").setCode("bar");
@@ -151,8 +157,35 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 		assertGone(myDeletedObservationId);
 	}

+	@Test
+	public void testExpungeAllVersionsDeletesRow() {
+		// Create then delete
+		Patient p = new Patient();
+		p.setId("TEST");
+		p.getMeta().addTag().setSystem("http://foo").setCode("bar");
+		p.setActive(true);
+		p.addName().setFamily("FOO");
+		myPatientDao.update(p).getId();
+
+		myPatientDao.delete(new IdType("Patient/TEST"));
+
+		runInTransaction(()-> assertThat(myResourceTableDao.findAll(), not(empty())));
+		runInTransaction(()-> assertThat(myResourceHistoryTableDao.findAll(), not(empty())));
+		runInTransaction(()-> assertThat(myForcedIdDao.findAll(), not(empty())));
+
+		myPatientDao.expunge(new ExpungeOptions()
+			.setExpungeDeletedResources(true)
+			.setExpungeOldVersions(true));
+
+		runInTransaction(()-> assertThat(myResourceTableDao.findAll(), empty()));
+		runInTransaction(()-> assertThat(myResourceHistoryTableDao.findAll(), empty()));
+		runInTransaction(()-> assertThat(myForcedIdDao.findAll(), empty()));
+	}
+
 	@Test
 	public void testExpungeInstanceVersionCurrentVersion() {
+		createStandardPatients();
+
 		try {
 			myPatientDao.expunge(myTwoVersionPatientId.withVersion("2"), new ExpungeOptions()
@@ -166,6 +199,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 	@Test
 	public void testExpungeInstanceVersionOldVersionsAndDeleted() {
+		createStandardPatients();
+
 		Patient p = new Patient();
 		p.setId("PT-TWOVERSION");
 		p.getMeta().addTag().setSystem("http://foo").setCode("bar");
@@ -193,6 +228,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 	@Test
 	public void testExpungeSystemOldVersionsAndDeleted() {
+		createStandardPatients();
+
 		mySystemDao.expunge(new ExpungeOptions()
 			.setExpungeDeletedResources(true)
 			.setExpungeOldVersions(true));
@@ -212,6 +249,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 	@Test
 	public void testExpungeTypeDeletedResources() {
+		createStandardPatients();
+
 		myPatientDao.expunge(new ExpungeOptions()
 			.setExpungeDeletedResources(true)
 			.setExpungeOldVersions(false));
@@ -231,6 +270,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 	@Test
 	public void testExpungeTypeOldVersions() {
+		createStandardPatients();
+
 		myPatientDao.expunge(new ExpungeOptions()
 			.setExpungeDeletedResources(false)
 			.setExpungeOldVersions(true));
@@ -251,6 +292,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 	@Test
 	public void testExpungeSystemEverything() {
+		createStandardPatients();
+
 		mySystemDao.expunge(new ExpungeOptions()
 			.setExpungeEverything(true));
@@ -270,6 +313,8 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
 	@Test
 	public void testExpungeTypeOldVersionsAndDeleted() {
+		createStandardPatients();
+
 		myPatientDao.expunge(new ExpungeOptions()
 			.setExpungeDeletedResources(true)
 			.setExpungeOldVersions(true));

@@ -144,3 +144,5 @@ drop index IDX_SP_TOKEN_UNQUAL;
 16919 | public | hfj_spidx_token | 5.5205e+07 | 24763842560 | 17233928192 | 8192 | 7529906176 | 23 GB | 16 GB | 8192 bytes | 7181 MB
+drop index IDX_FORCEDID_TYPE_FORCEDID;
+create index IDX_FORCEDID_TYPE_FID;

@@ -95,6 +95,19 @@
 			performance improvement on servers that are validating or executing FHIRPath repeatedly
 			under load. This module is used by default in the JPA server.
 		</action>
+		<action type="fix">
+			An index in the JPA server on the HFJ_FORCED_ID table was incorrectly
+			not marked as unique. This meant that under heavy load it was possible to
+			create two resources with the same client-assigned ID.
+		</action>
+		<action type="fix">
+			The JPA server
+			<![CDATA[<code>$expunge</code>]]>
+			operation deleted components of an individual resource record in
+			separate database transactions, meaning that if an operation failed
+			unexpectedly resources could be left in a weird state. This has been
+			corrected.
+		</action>
 	</release>
 	<release version="3.4.0" date="2018-05-28">
 		<action type="add">
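
As a usage note for the $expunge changelog entry above, the following is a hedged sketch of calling the type-level expunge through the JPA DAO API, mirroring the tests added in this commit. The wrapper class and method name are illustrative only, and it assumes expunge(ExpungeOptions) is exposed on IFhirResourceDao as the test code above suggests; the imports come from the test imports shown earlier.

import ca.uhn.fhir.jpa.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.util.ExpungeOptions;
import org.hl7.fhir.r4.model.Patient;

public class ExpungeUsageSketch {
	public static void expungeOldData(IFhirResourceDao<Patient> thePatientDao) {
		// Physically remove deleted Patient resources and superseded versions.
		thePatientDao.expunge(new ExpungeOptions()
			.setExpungeDeletedResources(true)
			.setExpungeOldVersions(true));
		// With this commit, the rows that make up an individual resource record are
		// expunged within a single small transaction rather than several, so an
		// unexpected failure no longer leaves a resource half-removed across tables.
	}
}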