Standardize on one Transactional annotation (#4130)

* Standardize on one Transactional annotation

* Add changelog

* Disable checkstyle rule

* Fix up transactions

* Test fixes

* Test cleanup

* License headers

* One more dep bump

* Resolve commit issue

* Test fixes

* Test fix

* Dep bump

* Fix

* Fix test

* Version bump

* Test fixes
This commit is contained in:
James Agnew 2022-10-13 16:26:11 -04:00 committed by GitHub
parent 2e6b3d16d6
commit 422ef87a5d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
132 changed files with 534 additions and 288 deletions

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@ -106,7 +106,7 @@ public class FhirServerConfig {
@Primary
@Bean
public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;

View File

@ -102,7 +102,7 @@ public class FhirServerConfigDstu3 {
@Primary
@Bean
public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;

View File

@ -102,7 +102,7 @@ public class FhirServerConfigR4 {
@Primary
@Bean
public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,6 @@
---
type: fix
issue: 4130
title: "A transaction scoping bug in Batch2 has been resolved, preventing a crash on Postgres. We have also
standardized on the Spring @Transactional annotation, and removed use of the equivalent javax.transaction
annotation."

View File

@ -5,6 +5,11 @@
(dependent HAPI modules listed in brackets):
<ul>
<li>Woodstox (Base): 6.2.5 -> 6.3.1</li>
<li>Spring (JPA): 5.3.20 -> 5.3.23</li>
<li>Postgresql JDBC (JPA): 42.4.1 -> 42.5.0</li>
<li>H2 (JPA): 2.1.212 -> 2.1.214</li>
<li>Caffeine (JPA): 2.9.1 -> 3.1.1</li>
<li>Spring Boot (Boot): 2.6.7 -> 2.7.4</li>
</ul>
"

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -30,6 +30,7 @@ import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Primary;
import org.springframework.transaction.PlatformTransactionManager;
@Configuration
@Import({
@ -38,14 +39,14 @@ import org.springframework.context.annotation.Primary;
public class JpaBatch2Config extends BaseBatch2Config {
@Bean
public IJobPersistence batch2JobInstancePersister(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository) {
return new JpaJobPersistenceImpl(theJobInstanceRepository, theWorkChunkRepository);
public IJobPersistence batch2JobInstancePersister(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository, PlatformTransactionManager theTransactionManager) {
return new JpaJobPersistenceImpl(theJobInstanceRepository, theWorkChunkRepository, theTransactionManager);
}
@Primary
@Bean
public IJobPersistence batch2JobInstancePersisterWrapper(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository) {
IJobPersistence retVal = batch2JobInstancePersister(theJobInstanceRepository, theWorkChunkRepository);
public IJobPersistence batch2JobInstancePersisterWrapper(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository, PlatformTransactionManager theTransactionManager) {
IJobPersistence retVal = batch2JobInstancePersister(theJobInstanceRepository, theWorkChunkRepository, theTransactionManager);
// Avoid H2 synchronization issues caused by
// https://github.com/h2database/h2database/issues/1808
if ("true".equals(System.getProperty("unit_test_mode"))) {

View File

@ -35,7 +35,6 @@ import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;
import ca.uhn.fhir.jpa.util.JobInstanceUtil;
import ca.uhn.fhir.model.api.PagingIterator;
import ca.uhn.fhir.narrative.BaseThymeleafNarrativeGenerator;
import org.apache.commons.collections4.ListUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
@ -44,9 +43,13 @@ import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
import javax.transaction.Transactional;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
@ -59,24 +62,29 @@ import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isBlank;
@Transactional(Transactional.TxType.REQUIRES_NEW)
public class JpaJobPersistenceImpl implements IJobPersistence {
private static final Logger ourLog = LoggerFactory.getLogger(JpaJobPersistenceImpl.class);
private final IBatch2JobInstanceRepository myJobInstanceRepository;
private final IBatch2WorkChunkRepository myWorkChunkRepository;
private final TransactionTemplate myTxTemplate;
/**
* Constructor
*/
public JpaJobPersistenceImpl(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository) {
public JpaJobPersistenceImpl(IBatch2JobInstanceRepository theJobInstanceRepository, IBatch2WorkChunkRepository theWorkChunkRepository, PlatformTransactionManager theTransactionManager) {
Validate.notNull(theJobInstanceRepository);
Validate.notNull(theWorkChunkRepository);
myJobInstanceRepository = theJobInstanceRepository;
myWorkChunkRepository = theWorkChunkRepository;
// TODO: JA replace with HapiTransactionManager in megascale ticket
myTxTemplate = new TransactionTemplate(theTransactionManager);
myTxTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public String storeWorkChunk(BatchWorkChunk theBatchWorkChunk) {
Batch2WorkChunkEntity entity = new Batch2WorkChunkEntity();
entity.setId(UUID.randomUUID().toString());
@ -94,6 +102,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public Optional<WorkChunk> fetchWorkChunkSetStartTimeAndMarkInProgress(String theChunkId) {
myWorkChunkRepository.updateChunkStatusForStart(theChunkId, new Date(), StatusEnum.IN_PROGRESS);
Optional<Batch2WorkChunkEntity> chunk = myWorkChunkRepository.findById(theChunkId);
@ -101,6 +110,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public String storeNewInstance(JobInstance theInstance) {
Validate.isTrue(isBlank(theInstance.getInstanceId()));
@ -120,18 +130,22 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
return entity.getId();
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public List<JobInstance> fetchInstancesByJobDefinitionIdAndStatus(String theJobDefinitionId, Set<StatusEnum> theRequestedStatuses, int thePageSize, int thePageIndex) {
PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.ASC, "myCreateTime");
return toInstanceList(myJobInstanceRepository.fetchInstancesByJobDefinitionIdAndStatus(theJobDefinitionId, theRequestedStatuses, pageRequest));
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public List<JobInstance> fetchInstancesByJobDefinitionId(String theJobDefinitionId, int thePageSize, int thePageIndex) {
PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.ASC, "myCreateTime");
return toInstanceList(myJobInstanceRepository.findInstancesByJobDefinitionId(theJobDefinitionId, pageRequest));
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public Page<JobInstance> fetchJobInstances(JobInstanceFetchRequest theRequest) {
PageRequest pageRequest = PageRequest.of(
theRequest.getPageStart(),
@ -150,11 +164,13 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
@Override
@Nonnull
@Transactional(propagation = Propagation.REQUIRES_NEW)
public Optional<JobInstance> fetchInstance(String theInstanceId) {
return myJobInstanceRepository.findById(theInstanceId).map(this::toInstance);
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public List<JobInstance> fetchInstances(FetchJobInstancesRequest theRequest, int thePage, int theBatchSize) {
String definitionId = theRequest.getJobDefinition();
String params = theRequest.getParameters();
@ -182,6 +198,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public List<JobInstance> fetchInstances(int thePageSize, int thePageIndex) {
// default sort is myCreateTime Asc
PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.ASC, "myCreateTime");
@ -189,6 +206,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public List<JobInstance> fetchRecentInstances(int thePageSize, int thePageIndex) {
PageRequest pageRequest = PageRequest.of(thePageIndex, thePageSize, Sort.Direction.DESC, "myCreateTime");
return myJobInstanceRepository.findAll(pageRequest).stream().map(this::toInstance).collect(Collectors.toList());
@ -203,11 +221,13 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void markWorkChunkAsErroredAndIncrementErrorCount(String theChunkId, String theErrorMessage) {
myWorkChunkRepository.updateChunkStatusAndIncrementErrorCountForEndError(theChunkId, new Date(), theErrorMessage, StatusEnum.ERRORED);
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public Optional<WorkChunk> markWorkChunkAsErroredAndIncrementErrorCount(MarkWorkChunkAsErrorRequest theParameters) {
markWorkChunkAsErroredAndIncrementErrorCount(theParameters.getChunkId(), theParameters.getErrorMsg());
Optional<Batch2WorkChunkEntity> op = myWorkChunkRepository.findById(theParameters.getChunkId());
@ -216,6 +236,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void markWorkChunkAsFailed(String theChunkId, String theErrorMessage) {
String errorMessage;
if (theErrorMessage.length() > Batch2WorkChunkEntity.ERROR_MSG_MAX_LENGTH) {
@ -228,11 +249,13 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void markWorkChunkAsCompletedAndClearData(String theChunkId, int theRecordsProcessed) {
myWorkChunkRepository.updateChunkStatusAndClearDataForEndSuccess(theChunkId, new Date(), theRecordsProcessed, StatusEnum.COMPLETED);
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void markWorkChunksWithStatusAndWipeData(String theInstanceId, List<String> theChunkIds, StatusEnum theStatus, String theErrorMsg) {
List<List<String>> listOfListOfIds = ListUtils.partition(theChunkIds, 100);
for (List<String> idList : listOfListOfIds) {
@ -241,10 +264,14 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void incrementWorkChunkErrorCount(String theChunkId, int theIncrementBy) {
myWorkChunkRepository.incrementWorkChunkErrorCount(theChunkId, theIncrementBy);
}
/**
* Note: Not @Transactional because {@link #fetchChunks(String, boolean, int, int, Consumer)} starts a transaction
*/
@Override
public List<WorkChunk> fetchWorkChunksWithoutData(String theInstanceId, int thePageSize, int thePageIndex) {
ArrayList<WorkChunk> chunks = new ArrayList<>();
@ -253,25 +280,35 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
private void fetchChunks(String theInstanceId, boolean theIncludeData, int thePageSize, int thePageIndex, Consumer<WorkChunk> theConsumer) {
List<Batch2WorkChunkEntity> chunks = myWorkChunkRepository.fetchChunks(PageRequest.of(thePageIndex, thePageSize), theInstanceId);
for (Batch2WorkChunkEntity chunk : chunks) {
theConsumer.accept(toChunk(chunk, theIncludeData));
}
myTxTemplate.executeWithoutResult(tx -> {
List<Batch2WorkChunkEntity> chunks = myWorkChunkRepository.fetchChunks(PageRequest.of(thePageIndex, thePageSize), theInstanceId);
for (Batch2WorkChunkEntity chunk : chunks) {
theConsumer.accept(toChunk(chunk, theIncludeData));
}
});
}
private void fetchChunksForStep(String theInstanceId, String theStepId, int thePageSize, int thePageIndex, Consumer<WorkChunk> theConsumer) {
List<Batch2WorkChunkEntity> chunks = myWorkChunkRepository.fetchChunksForStep(PageRequest.of(thePageIndex, thePageSize), theInstanceId, theStepId);
for (Batch2WorkChunkEntity chunk : chunks) {
theConsumer.accept(toChunk(chunk, true));
}
myTxTemplate.executeWithoutResult(tx -> {
List<Batch2WorkChunkEntity> chunks = myWorkChunkRepository.fetchChunksForStep(PageRequest.of(thePageIndex, thePageSize), theInstanceId, theStepId);
for (Batch2WorkChunkEntity chunk : chunks) {
theConsumer.accept(toChunk(chunk, true));
}
});
}
/**
* Note: Not @Transactional because the transaction happens in a lambda that's called outside of this method's scope
*/
@Override
public Iterator<WorkChunk> fetchAllWorkChunksIterator(String theInstanceId, boolean theWithData) {
return new PagingIterator<>((thePageIndex, theBatchSize, theConsumer) -> fetchChunks(theInstanceId, theWithData, theBatchSize, thePageIndex, theConsumer));
}
/**
* Note: Not @Transactional because the transaction happens in a lambda that's called outside of this method's scope
*/
@Override
public Iterator<WorkChunk> fetchAllWorkChunksForStepIterator(String theInstanceId, String theStepId) {
return new PagingIterator<>((thePageIndex, theBatchSize, theConsumer) -> fetchChunksForStep(theInstanceId, theStepId, theBatchSize, thePageIndex, theConsumer));
@ -284,6 +321,7 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
* @return true if the status changed
*/
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public boolean updateInstance(JobInstance theInstance) {
// Separate updating the status so we have atomic information about whether the status is changing
int recordsChangedByStatusUpdate = myJobInstanceRepository.updateInstanceStatus(theInstance.getInstanceId(), theInstance.getStatus());
@ -312,23 +350,27 @@ public class JpaJobPersistenceImpl implements IJobPersistence {
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void deleteInstanceAndChunks(String theInstanceId) {
myWorkChunkRepository.deleteAllForInstance(theInstanceId);
myJobInstanceRepository.deleteById(theInstanceId);
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void deleteChunks(String theInstanceId) {
myWorkChunkRepository.deleteAllForInstance(theInstanceId);
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public boolean markInstanceAsCompleted(String theInstanceId) {
int recordsChanged = myJobInstanceRepository.updateInstanceStatus(theInstanceId, StatusEnum.COMPLETED);
return recordsChanged > 0;
}
@Override
@Transactional(propagation = Propagation.REQUIRES_NEW)
public JobOperationResultJson cancelInstance(String theInstanceId) {
int recordsChanged = myJobInstanceRepository.updateInstanceCancelled(theInstanceId, true);
String operationString = "Cancel job instance " + theInstanceId;

View File

@ -39,7 +39,9 @@ import org.springframework.transaction.PlatformTransactionManager;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
@ -61,7 +63,7 @@ public class DatabaseBlobBinaryStorageSvcImpl extends BaseBinaryStorageSvcImpl {
private DaoConfig myDaoConfig;
@Override
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public StoredDetails storeBlob(IIdType theResourceId, String theBlobIdOrNull, String theContentType, InputStream theInputStream) throws IOException {
/*

View File

@ -46,10 +46,11 @@ import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.Date;
import java.util.Optional;
@ -98,7 +99,7 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
}
@Override
@Transactional(Transactional.TxType.NEVER)
@Transactional(propagation = Propagation.NEVER)
public synchronized void cancelAndPurgeAllJobs() {
myTxTemplate.execute(t -> {
ourLog.info("Deleting all files");
@ -115,7 +116,7 @@ public class BulkDataExportJobSchedulingHelperImpl implements IBulkDataExportJob
* This method is called by the scheduler to run a pass of the
* generator
*/
@Transactional(value = Transactional.TxType.NEVER)
@Transactional(propagation = Propagation.NEVER)
@Override
public void purgeExpiredFiles() {
if (!myDaoConfig.isEnableTaskBulkExportJobExecution()) {

View File

@ -25,7 +25,7 @@ import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
@Service
public class BulkExportCollectionFileDaoSvc {

View File

@ -49,11 +49,12 @@ import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Objects;
import java.util.Optional;
@ -156,7 +157,7 @@ public class BulkDataImportSvcImpl implements IBulkDataImportSvc {
/**
* To be called by the job scheduler
*/
@Transactional(value = Transactional.TxType.NEVER)
@Transactional(propagation = Propagation.NEVER)
@Override
public ActivateJobResult activateNextReadyJob() {
if (!myDaoConfig.isEnableTaskBulkImportJobExecution()) {

View File

@ -785,6 +785,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
if (!allDefs.contains(tag)) {
if (shouldDroppedTagBeRemovedOnUpdate(theRequest, tag)) {
theEntity.getTags().remove(tag);
} else if (HapiExtensions.EXT_SUBSCRIPTION_MATCHING_STRATEGY.equals(tag.getTag().getSystem())) {
theEntity.getTags().remove(tag);
}
}

View File

@ -54,7 +54,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

View File

@ -55,7 +55,9 @@ import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nullable;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
@ -70,7 +72,7 @@ import static org.hl7.fhir.common.hapi.validation.support.ValidationConstants.LO
* validation resources (StructureDefinition, ValueSet, CodeSystem, etc.) from the resources
* persisted in the JPA server.
*/
@Transactional(value = Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public class JpaPersistedResourceValidationSupport implements IValidationSupport {
private static final Logger ourLog = LoggerFactory.getLogger(JpaPersistedResourceValidationSupport.class);

View File

@ -20,8 +20,9 @@ package ca.uhn.fhir.jpa.dao.data;
* #L%
*/
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Transactional(Transactional.TxType.MANDATORY)
@Transactional(propagation = Propagation.MANDATORY)
public interface IHapiFhirJpaRepository {
}

View File

@ -49,7 +49,7 @@ import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.springframework.beans.factory.annotation.Autowired;
import javax.annotation.Nonnull;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

View File

@ -258,7 +258,8 @@ public class ResourceExpungeService implements IResourceExpungeService {
ourLog.info("Expunging current version of resource {}", resource.getIdDt().getValue());
deleteAllSearchParams(new ResourcePersistentId(resource.getResourceId()));
resource.getTags().clear();
myResourceTagDao.deleteByResourceId(resource.getId());
if (resource.getForcedId() != null) {
ForcedId forcedId = resource.getForcedId();

View File

@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.delete;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;

View File

@ -71,6 +71,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.Nonnull;
@ -84,7 +85,7 @@ import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
@ -450,7 +451,7 @@ public class JpaPackageCache extends BasePackageCacheManager implements IHapiPac
}
@Override
@Transactional(Transactional.TxType.NEVER)
@Transactional(propagation = Propagation.NEVER)
public NpmPackage installPackage(PackageInstallationSpec theInstallationSpec) throws IOException {
Validate.notBlank(theInstallationSpec.getName(), "thePackageId must not be blank");
Validate.notBlank(theInstallationSpec.getVersion(), "thePackageVersion must not be blank");

View File

@ -42,7 +42,7 @@ import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;
import javax.annotation.PostConstruct;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;

View File

@ -41,9 +41,10 @@ import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.support.TransactionTemplate;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.time.Instant;
import java.util.Collection;
import java.util.Date;
@ -87,7 +88,7 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
myCutoffSlack = theCutoffSlack;
}
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
@Override
public Search save(Search theSearch) {
Search newSearch = mySearchDao.save(theSearch);
@ -95,7 +96,7 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
}
@Override
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public Optional<Search> fetchByUuid(String theUuid) {
Validate.notBlank(theUuid);
return mySearchDao.findByUuidAndFetchIncludes(theUuid);
@ -110,7 +111,7 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
}
@Override
@Transactional(Transactional.TxType.NEVER)
@Transactional(propagation = Propagation.NEVER)
public Optional<Search> tryToMarkSearchAsInProgress(Search theSearch) {
ourLog.trace("Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING);
try {
@ -152,7 +153,7 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
return Optional.empty();
}
@Transactional(Transactional.TxType.NEVER)
@Transactional(propagation = Propagation.NEVER)
@Override
public void pollForStaleSearchesAndDeleteThem() {
if (!myDaoConfig.isExpireSearchResults()) {

View File

@ -30,7 +30,8 @@ import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.util.Collections;
import java.util.List;
@ -43,7 +44,7 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
private ISearchResultDao mySearchResultDao;
@Override
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public List<ResourcePersistentId> fetchResultPids(Search theSearch, int theFrom, int theTo) {
final Pageable page = toPage(theFrom, theTo);
if (page == null) {
@ -60,7 +61,7 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
}
@Override
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public List<ResourcePersistentId> fetchAllResultPids(Search theSearch) {
List<Long> retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId());
ourLog.trace("fetchAllResultPids returned {} pids", retVal.size());
@ -68,7 +69,7 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
}
@Override
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public void storeResults(Search theSearch, List<ResourcePersistentId> thePreviouslyStoredResourcePids, List<ResourcePersistentId> theNewResourcePids) {
List<SearchResult> resultsToSave = Lists.newArrayList();

View File

@ -52,6 +52,7 @@ import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionTemplate;
@ -61,7 +62,7 @@ import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import javax.persistence.Query;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.Collection;
import java.util.Date;
import java.util.List;
@ -153,13 +154,13 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
}
@Override
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public Long markAllResourcesForReindexing() {
return markAllResourcesForReindexing(null);
}
@Override
@Transactional(Transactional.TxType.REQUIRED)
@Transactional(propagation = Propagation.REQUIRED)
public Long markAllResourcesForReindexing(String theType) {
String typeDesc;
@ -201,7 +202,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
}
@Override
@Transactional(Transactional.TxType.NEVER)
@Transactional(propagation = Propagation.NEVER)
public Integer runReindexingPass() {
if (myDaoConfig.isSchedulingDisabled() || !myDaoConfig.isEnableTaskPreExpandValueSets()) {
return null;

View File

@ -422,7 +422,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
}
@Override
@Transactional
public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull String theValueSetCanonicalUrl) {
ValueSet valueSet = fetchCanonicalValueSetFromCompleteContext(theValueSetCanonicalUrl);
if (valueSet == null) {
@ -433,16 +432,15 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
}
@Override
@Transactional(propagation = Propagation.REQUIRED)
public ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, @Nonnull ValueSet theValueSetToExpand) {
String filter = null;
if (theExpansionOptions != null) {
filter = theExpansionOptions.getFilter();
}
return expandValueSet(theExpansionOptions, theValueSetToExpand, ExpansionFilter.fromFilterString(filter));
return doExpandValueSet(theExpansionOptions, theValueSetToExpand, ExpansionFilter.fromFilterString(filter));
}
private ValueSet expandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, ExpansionFilter theFilter) {
private ValueSet doExpandValueSet(@Nullable ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, ExpansionFilter theFilter) {
ValidateUtil.isNotNullOrThrowUnprocessableEntity(theValueSetToExpand, "ValueSet to expand can not be null");
ValueSetExpansionOptions expansionOptions = provideExpansionOptions(theExpansionOptions);
@ -461,7 +459,9 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
accumulator.addParameter().setName("count").setValue(new IntegerType(count));
}
expandValueSetIntoAccumulator(theValueSetToExpand, theExpansionOptions, accumulator, theFilter, true);
myTxTemplate.executeWithoutResult(tx-> {
expandValueSetIntoAccumulator(theValueSetToExpand, theExpansionOptions, accumulator, theFilter, true);
});
if (accumulator.getTotalConcepts() != null) {
accumulator.setTotal(accumulator.getTotalConcepts());
@ -506,7 +506,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
ourLog.debug("ValueSet is not present in terminology tables. Will perform in-memory expansion without parameters. {}", getValueSetInfo(theValueSetToExpand));
String msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "valueSetExpandedUsingInMemoryExpansion", getValueSetInfo(theValueSetToExpand));
theAccumulator.addMessage(msg);
expandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter);
doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter);
return;
}
@ -517,7 +517,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
if (termValueSet.getExpansionStatus() != TermValueSetPreExpansionStatusEnum.EXPANDED) {
String msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "valueSetNotYetExpanded", getValueSetInfo(theValueSetToExpand), termValueSet.getExpansionStatus().name(), termValueSet.getExpansionStatus().getDescription());
theAccumulator.addMessage(msg);
expandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter);
doExpandValueSet(theExpansionOptions, theValueSetToExpand, theAccumulator, theFilter);
return;
}
@ -734,13 +734,16 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
}
@Override
@Transactional(propagation = Propagation.REQUIRED)
public void expandValueSet(ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator) {
expandValueSet(theExpansionOptions, theValueSetToExpand, theValueSetCodeAccumulator, ExpansionFilter.NO_FILTER);
doExpandValueSet(theExpansionOptions, theValueSetToExpand, theValueSetCodeAccumulator, ExpansionFilter.NO_FILTER);
}
/**
* Note: Not transactional because specific calls within this method
* get executed in a transaction
*/
@SuppressWarnings("ConstantConditions")
private void expandValueSet(ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator, @Nonnull ExpansionFilter theExpansionFilter) {
private void doExpandValueSet(ValueSetExpansionOptions theExpansionOptions, ValueSet theValueSetToExpand, IValueSetConceptAccumulator theValueSetCodeAccumulator, @Nonnull ExpansionFilter theExpansionFilter) {
Set<String> addedCodes = new HashSet<>();
StopWatch sw = new StopWatch();

View File

@ -8,6 +8,7 @@ import ca.uhn.fhir.context.support.ValidationSupportContext;
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.api.ITermReadSvcDstu3;
import ca.uhn.fhir.jpa.term.ex.ExpansionTooCostlyException;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.util.ValidateUtil;
import org.hl7.fhir.convertors.advisors.impl.BaseAdvisor_30_40;
@ -22,6 +23,7 @@ import org.hl7.fhir.instance.model.api.IBaseDatatype;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;

View File

@ -19,7 +19,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import javax.annotation.Nonnull;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
/*
* #%L
@ -58,7 +58,6 @@ public class TermReadSvcR4 extends BaseTermReadSvcImpl implements ITermReadSvcR4
super.expandValueSet(theExpansionOptions, valueSetToExpand, theValueSetCodeAccumulator);
}
@Transactional(dontRollbackOn = {ExpansionTooCostlyException.class})
@Override
public IValidationSupport.ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) {
ValueSet expanded = super.expandValueSet(theExpansionOptions, (ValueSet) theValueSetToExpand);

View File

@ -8,7 +8,6 @@ import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5;
import ca.uhn.fhir.jpa.term.ex.ExpansionTooCostlyException;
import ca.uhn.fhir.util.ValidateUtil;
import org.hl7.fhir.convertors.advisors.impl.BaseAdvisor_40_50;
import org.hl7.fhir.convertors.factory.VersionConvertorFactory_40_50;
@ -20,11 +19,9 @@ import org.hl7.fhir.r5.model.CodeableConcept;
import org.hl7.fhir.r5.model.Coding;
import org.hl7.fhir.r5.model.ValueSet;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.transaction.Transactional;
/*
* #%L
@ -50,11 +47,8 @@ public class TermReadSvcR5 extends BaseTermReadSvcImpl implements IValidationSup
@Autowired
private DaoRegistry myDaoRegistry;
@Autowired
private PlatformTransactionManager myTransactionManager;
@Override
@Transactional(dontRollbackOn = {ExpansionTooCostlyException.class})
public ValueSetExpansionOutcome expandValueSet(ValidationSupportContext theValidationSupportContext, ValueSetExpansionOptions theExpansionOptions, @Nonnull IBaseResource theValueSetToExpand) {
ValueSet valueSetToExpand = (ValueSet) theValueSetToExpand;
org.hl7.fhir.r4.model.ValueSet expandedR4 = super.expandValueSet(theExpansionOptions,

View File

@ -31,7 +31,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.ValueSet;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
/**

View File

@ -20,7 +20,7 @@ import org.hl7.fhir.r4.model.ValueSet;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Optional;
import java.util.Set;

View File

@ -19,6 +19,7 @@ import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.PlatformTransactionManager;
import java.util.ArrayList;
import java.util.Arrays;
@ -47,6 +48,8 @@ class JpaJobPersistenceImplTest {
IBatch2JobInstanceRepository myJobInstanceRepository;
@Mock
IBatch2WorkChunkRepository myWorkChunkRepository;
@Mock
PlatformTransactionManager myTxManager;
@InjectMocks
JpaJobPersistenceImpl mySvc;

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -39,7 +39,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Nonnull;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;

View File

@ -32,7 +32,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Nonnull;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;

View File

@ -37,7 +37,7 @@ import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.transaction.Transactional;
import org.springframework.transaction.annotation.Transactional;
import java.util.ArrayList;
import java.util.List;

View File

@ -80,7 +80,7 @@ public class MdmLinkDaoSvcTest extends BaseMdmR4Test {
List<Long> expectedExpandedPids = mdmLinks.stream().map(MdmLink::getSourcePid).collect(Collectors.toList());
//SUT
List<MdmPidTuple> lists = myMdmLinkDao.expandPidsBySourcePidAndMatchResult(new ResourcePersistentId(mdmLinks.get(0).getSourcePid()), MdmMatchResultEnum.MATCH);
List<MdmPidTuple> lists = runInTransaction(()->myMdmLinkDao.expandPidsBySourcePidAndMatchResult(new ResourcePersistentId(mdmLinks.get(0).getSourcePid()), MdmMatchResultEnum.MATCH));
assertThat(lists, hasSize(10));

View File

@ -32,6 +32,7 @@ import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
class MdmLinkCreateSvcImplTest {
@SuppressWarnings("unused")
@Spy
FhirContext myFhirContext = FhirContext.forR4();
@Mock
@ -71,7 +72,7 @@ class MdmLinkCreateSvcImplTest {
}
@BeforeEach
private void setup() {
public void setup() {
ResourcePersistentId goldenId = new ResourcePersistentId(1L);
ResourcePersistentId sourceId = new ResourcePersistentId(2L);
when(myIdHelperService.getPidOrThrowException(any()))

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -37,15 +37,15 @@ import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.instance.model.api.IIdType;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.mockito.internal.util.collections.ListUtil;
import org.simplejavamail.internal.util.ListUtil;
import java.util.List;
import java.util.stream.Collectors;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
@ -312,12 +312,12 @@ public class FhirResourceDaoDstu2SearchCustomSearchParamTest extends BaseJpaDstu
myGroupDao.create(g);
assertThat(myResourceLinkDao.findAll(), empty());
assertThat(ListUtil.filter(myResourceIndexedSearchParamTokenDao.findAll(), new ListUtil.Filter<ResourceIndexedSearchParamToken>() {
@Override
public boolean isOut(ResourceIndexedSearchParamToken object) {
return !object.getResourceType().equals("Group") || object.isMissing();
}
}), empty());
List<ResourceIndexedSearchParamToken> tokens = myResourceIndexedSearchParamTokenDao
.findAll()
.stream()
.filter(object -> !(!object.getResourceType().equals("Group") || object.isMissing()))
.collect(Collectors.toList());
assertThat(tokens, empty());
}
@Test

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -106,7 +106,7 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
public void testConceptMapFindTermConceptMapByUrl() {
Pageable page = PageRequest.of(0, 1);
List<TermConceptMap> theExpConceptMapList = myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, CM_URL);
List<TermConceptMap> theExpConceptMapList = runInTransaction(()->myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, CM_URL));
assertEquals(1, theExpConceptMapList.size());
assertEquals(CM_URL, theExpConceptMapList.get(0).getUrl());
@ -125,8 +125,8 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
myConceptMapDao.create(theConceptMap1);
myConceptMapDao.create(theConceptMap2);
Optional<TermConceptMap> theExpConceptMapV1 = myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v1");
Optional<TermConceptMap> theExpConceptMapV2 = myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v2");
Optional<TermConceptMap> theExpConceptMapV1 = runInTransaction(()->myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v1"));
Optional<TermConceptMap> theExpConceptMapV2 = runInTransaction(()->myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v2"));
assertTrue(theExpConceptMapV1.isPresent());
assertEquals(theUrl, theExpConceptMapV1.get().getUrl());
@ -138,7 +138,7 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
// should return the latest one which is v2
Pageable page = PageRequest.of(0, 1);
List<TermConceptMap> theExpSecondOne = myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, theUrl);
List<TermConceptMap> theExpSecondOne = runInTransaction(()->myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, theUrl));
assertEquals(1, theExpSecondOne.size());
assertEquals(theUrl, theExpSecondOne.get(0).getUrl());
@ -158,7 +158,7 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
myConceptMapDao.create(theConceptMap1);
myConceptMapDao.create(theConceptMap2);
Optional<TermConceptMap> theExpConceptMapV1 = myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v1");
Optional<TermConceptMap> theExpConceptMapV1 = runInTransaction(()->myTermConceptMapDao.findTermConceptMapByUrlAndVersion(theUrl, "v1"));
assertTrue(theExpConceptMapV1.isPresent());
assertEquals(theUrl, theExpConceptMapV1.get().getUrl());
@ -166,7 +166,7 @@ public class FhirResourceDaoDstu3ConceptMapTest extends BaseJpaDstu3Test {
// should return the latest one which in this case is not versioned
Pageable page = PageRequest.of(0, 1);
List<TermConceptMap> theExpSecondOne = myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, theUrl);
List<TermConceptMap> theExpSecondOne = runInTransaction(()->myTermConceptMapDao.getTermConceptMapEntitiesByUrlOrderByMostRecentUpdate(page, theUrl));
assertEquals(1, theExpSecondOne.size());
assertEquals(theUrl, theExpSecondOne.get(0).getUrl());

View File

@ -83,9 +83,9 @@ public class FhirResourceDaoDstu3ValueSetMultiVersionTest extends BaseJpaDstu3Te
public void testCreateVersionedValueSets() {
Map<ValueSetVersions, DaoMethodOutcome> myValueSets = createVersionedValueSets();
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
Optional<TermValueSet> optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
Optional<TermValueSet> optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
assertTrue(optionalTermValueSet.isPresent());
Long nullVersion_resid = ((ResourceTable)myValueSets.get(ValueSetVersions.NULL).getEntity()).getId();
assertNotNull(nullVersion_resid);
@ -93,7 +93,7 @@ public class FhirResourceDaoDstu3ValueSetMultiVersionTest extends BaseJpaDstu3Te
assertEquals(nullVersion_resid, optionalTermValueSet.get().getResource().getId());
assertEquals("ValueSet_noVersion", optionalTermValueSet.get().getName());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1"));
assertTrue(optionalTermValueSet.isPresent());
Long v1Version_resid = ((ResourceTable)myValueSets.get(ValueSetVersions.V1).getEntity()).getId();
assertNotNull(v1Version_resid);
@ -101,7 +101,7 @@ public class FhirResourceDaoDstu3ValueSetMultiVersionTest extends BaseJpaDstu3Te
assertEquals(v1Version_resid, optionalTermValueSet.get().getResource().getId());
assertEquals("ValueSet_v1", optionalTermValueSet.get().getName());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2"));
assertTrue(optionalTermValueSet.isPresent());
Long v2Version_resid = ((ResourceTable)myValueSets.get(ValueSetVersions.V2).getEntity()).getId();
assertNotNull(v2Version_resid);
@ -115,15 +115,15 @@ public class FhirResourceDaoDstu3ValueSetMultiVersionTest extends BaseJpaDstu3Te
public void testUpdateVersionedValueSets() {
Map<ValueSetVersions, DaoMethodOutcome> myValueSets = createVersionedValueSets();
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
TermValueSet termValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version"));
TermValueSet termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version")));
assertEquals("ValueSet_noVersion", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertEquals("ValueSet_v1", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertEquals("ValueSet_v2", termValueSet.getName());
// Update ValueSets
@ -143,21 +143,21 @@ public class FhirResourceDaoDstu3ValueSetMultiVersionTest extends BaseJpaDstu3Te
Long v2Version_resid = ((ResourceTable)v2Version_update_outcome.getEntity()).getId();
// Verify that ValueSets were updated.
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version")));
assertNotNull(nullVersion_resid);
assertNotNull(termValueSet.getResource());
assertEquals(nullVersion_resid, termValueSet.getResource().getId());
assertEquals("ValueSet_noVersion_updated", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertNotNull(v1Version_resid);
assertNotNull(termValueSet.getResource());
assertEquals(v1Version_resid, termValueSet.getResource().getId());
assertEquals("ValueSet_v1_updated", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertNotNull(v2Version_resid);
assertNotNull(termValueSet.getResource());
assertEquals(v2Version_resid, termValueSet.getResource().getId());
@ -169,53 +169,53 @@ public class FhirResourceDaoDstu3ValueSetMultiVersionTest extends BaseJpaDstu3Te
public void testDeleteVersionedValueSets() {
Map<ValueSetVersions, DaoMethodOutcome> myValueSets = createVersionedValueSets();
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
TermValueSet termValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version"));
TermValueSet termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version")));
assertEquals("ValueSet_noVersion", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertEquals("ValueSet_v1", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertEquals("ValueSet_v2", termValueSet.getName());
// Delete ValueSets
myValueSetDao.delete(myValueSets.get(ValueSetVersions.NULL).getResource().getIdElement());
assertEquals(2, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
Optional<TermValueSet> optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
assertEquals(2, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
Optional<TermValueSet> optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
if (optionalTermValueSet.isPresent()) {
fail();
}
assertNotNull(myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertNotNull(myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertNotNull(runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"))));
assertNotNull(runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"))));
myValueSetDao.delete(myValueSets.get(ValueSetVersions.V1).getResource().getIdElement());
assertEquals(1, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
assertEquals(1, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
if (optionalTermValueSet.isPresent()) {
fail();
}
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1"));
if (optionalTermValueSet.isPresent()) {
fail();
}
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2"));
if (!optionalTermValueSet.isPresent()) {
fail("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2");
}
myValueSetDao.delete(myValueSets.get(ValueSetVersions.V2).getResource().getIdElement());
assertEquals(0, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
assertEquals(0, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
if (optionalTermValueSet.isPresent()) {
fail();
}
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1"));
if (optionalTermValueSet.isPresent()) {
fail();
}
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2"));
if (optionalTermValueSet.isPresent()) {
fail();
}

View File

@ -322,7 +322,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
loadAndPersistCodeSystemAndValueSet();
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
Slice<TermValueSet> page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED);
Slice<TermValueSet> page = runInTransaction(()->myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED));
assertEquals(2, page.getContent().size());
// Verify v1 ValueSet
@ -418,7 +418,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
loadAndPersistCodeSystemAndValueSet();
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
Slice<TermValueSet> page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED);
Slice<TermValueSet> page = runInTransaction(()->myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED));
assertEquals(2, page.getContent().size());
// Validate ValueSet v1
@ -585,7 +585,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
loadAndPersistCodeSystemAndValueSet();
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
Slice<TermValueSet> page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED);
Slice<TermValueSet> page = runInTransaction(()->myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED));
assertEquals(2, page.getContent().size());
// Check expansion of multi-versioned ValueSet with version 1

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -26,34 +26,33 @@ public class JobInstanceRepositoryTest extends BaseJpaR4Test {
private IBatch2JobInstanceRepository myJobInstanceRepository;
@Autowired
private IJobPersistence myJobPersistenceSvc;
private String myParams = "{\"param1\":\"value1\"}";
private String myJobDefinitionId = "my-job-def-id";
private String myInstanceId = "abc-123";
private static final String PARAMS = "{\"param1\":\"value1\"}";
private static final String JOB_DEFINITION_ID = "my-job-def-id";
private static final String INSTANCE_ID = "abc-123";
@Test
public void testSearchByJobParamsAndStatuses_SingleStatus() {
Set<StatusEnum> statuses = Set.of(StatusEnum.IN_PROGRESS);
List<Batch2JobInstanceEntity> instancesByJobIdParamsAndStatus = myJobInstanceRepository.findInstancesByJobIdParamsAndStatus(myJobDefinitionId, myParams, statuses, PageRequest.of(0, 10));
List<Batch2JobInstanceEntity> instancesByJobIdParamsAndStatus = runInTransaction(()->myJobInstanceRepository.findInstancesByJobIdParamsAndStatus(JOB_DEFINITION_ID, PARAMS, statuses, PageRequest.of(0, 10)));
assertThat(instancesByJobIdParamsAndStatus, hasSize(1));
}
@Test
public void testSearchByJobParamsAndStatuses_MultiStatus() {
Set<StatusEnum> statuses = Set.of(StatusEnum.IN_PROGRESS, StatusEnum.COMPLETED);
List<Batch2JobInstanceEntity> instances = myJobInstanceRepository.findInstancesByJobIdParamsAndStatus(myJobDefinitionId, myParams, statuses, PageRequest.of(0, 10));
List<Batch2JobInstanceEntity> instances = runInTransaction(()->myJobInstanceRepository.findInstancesByJobIdParamsAndStatus(JOB_DEFINITION_ID, PARAMS, statuses, PageRequest.of(0, 10)));
assertThat(instances, hasSize(2));
}
@Test
public void testSearchByJobParamsWithoutStatuses() {
List<Batch2JobInstanceEntity> instances = myJobInstanceRepository.findInstancesByJobIdAndParams(myJobDefinitionId, myParams, PageRequest.of(0, 10));
List<Batch2JobInstanceEntity> instances = runInTransaction(()->myJobInstanceRepository.findInstancesByJobIdAndParams(JOB_DEFINITION_ID, PARAMS, PageRequest.of(0, 10)));
assertThat(instances, hasSize(4));
}
@Test
public void testServiceLogicIsCorrectWhenNoStatusesAreUsed() {
FetchJobInstancesRequest request = new FetchJobInstancesRequest(myJobDefinitionId, myParams);
FetchJobInstancesRequest request = new FetchJobInstancesRequest(JOB_DEFINITION_ID, PARAMS);
List<JobInstance> jobInstances = myJobPersistenceSvc.fetchInstances(request, 0, 1000);
assertThat(jobInstances, hasSize(4));
}
@ -61,7 +60,7 @@ public class JobInstanceRepositoryTest extends BaseJpaR4Test {
@Test
public void testServiceLogicIsCorrectWithStatuses() {
//Given
FetchJobInstancesRequest request = new FetchJobInstancesRequest(myJobDefinitionId, myParams, StatusEnum.IN_PROGRESS, StatusEnum.COMPLETED);
FetchJobInstancesRequest request = new FetchJobInstancesRequest(JOB_DEFINITION_ID, PARAMS, StatusEnum.IN_PROGRESS, StatusEnum.COMPLETED);
//When
List<JobInstance> jobInstances = myJobPersistenceSvc.fetchInstances(request, 0, 1000);
@ -71,38 +70,38 @@ public class JobInstanceRepositoryTest extends BaseJpaR4Test {
}
@BeforeEach
private void beforeEach() {
public void beforeEach() {
//Create in-progress job.
Batch2JobInstanceEntity instance= new Batch2JobInstanceEntity();
instance.setId(myInstanceId);
instance.setId(INSTANCE_ID);
instance.setStatus(StatusEnum.IN_PROGRESS);
instance.setCreateTime(new Date());
instance.setDefinitionId(myJobDefinitionId);
instance.setParams(myParams);
instance.setDefinitionId(JOB_DEFINITION_ID);
instance.setParams(PARAMS);
myJobInstanceRepository.save(instance);
Batch2JobInstanceEntity completedInstance = new Batch2JobInstanceEntity();
completedInstance.setId(myInstanceId + "-2");
completedInstance.setId(INSTANCE_ID + "-2");
completedInstance.setStatus(StatusEnum.COMPLETED);
completedInstance.setCreateTime(new Date());
completedInstance.setDefinitionId(myJobDefinitionId);
completedInstance.setParams(myParams);
completedInstance.setDefinitionId(JOB_DEFINITION_ID);
completedInstance.setParams(PARAMS);
myJobInstanceRepository.save(completedInstance);
Batch2JobInstanceEntity cancelledInstance = new Batch2JobInstanceEntity();
cancelledInstance.setId(myInstanceId + "-3");
cancelledInstance.setId(INSTANCE_ID + "-3");
cancelledInstance.setStatus(StatusEnum.CANCELLED);
cancelledInstance.setCreateTime(new Date());
cancelledInstance.setDefinitionId(myJobDefinitionId);
cancelledInstance.setParams(myParams);
cancelledInstance.setDefinitionId(JOB_DEFINITION_ID);
cancelledInstance.setParams(PARAMS);
myJobInstanceRepository.save(cancelledInstance);
Batch2JobInstanceEntity failedInstance = new Batch2JobInstanceEntity();
failedInstance.setId(myInstanceId + "-4");
failedInstance.setId(INSTANCE_ID + "-4");
failedInstance.setStatus(StatusEnum.FAILED);
failedInstance.setCreateTime(new Date());
failedInstance.setDefinitionId(myJobDefinitionId);
failedInstance.setParams(myParams);
failedInstance.setDefinitionId(JOB_DEFINITION_ID);
failedInstance.setParams(PARAMS);
myJobInstanceRepository.save(failedInstance);
}

View File

@ -64,7 +64,6 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.internal.util.collections.ListUtil;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
@ -690,12 +689,12 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
runInTransaction(() -> {
assertThat(myResourceLinkDao.findAll(), empty());
assertThat(ListUtil.filter(myResourceIndexedSearchParamTokenDao.findAll(), new ListUtil.Filter<ResourceIndexedSearchParamToken>() {
@Override
public boolean isOut(ResourceIndexedSearchParamToken object) {
return !object.getResourceType().equals("Group") || object.isMissing();
}
}), empty());
List<ResourceIndexedSearchParamToken> tokens = myResourceIndexedSearchParamTokenDao
.findAll()
.stream()
.filter(object -> !(!object.getResourceType().equals("Group") || object.isMissing()))
.collect(Collectors.toList());
assertThat(tokens, empty());
});
}

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.provider.r4.BaseResourceProviderR4Test;
import ca.uhn.fhir.jpa.test.config.TestR4Config;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
@ -42,6 +43,13 @@ import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
/**
* Note that some tests in this class use the {@link ThreadSafeResourceDeleterSvc}
* which uses {@link org.springframework.transaction.annotation.Propagation#REQUIRES_NEW}
* propagation and therefore needs at least 2 connections free in the pool. Since the
* test config randomizes the number of connections available, these tests
* only run if more than one connection is going to be available.
*/
public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(CascadingDeleteInterceptorTest.class);
@ -187,6 +195,10 @@ public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
@Test
public void testDeleteCascading() throws IOException {
if (TestR4Config.getMaxThreads() == 1) {
return; // See class javadoc for explanation
}
createResources();
ourRestServer.getInterceptorService().registerInterceptor(myDeleteInterceptor);
@ -211,6 +223,10 @@ public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
@Test
public void testDeleteCascadingWithOverridePathBasedReferentialIntegrityForDeletesInterceptorAlsoRegistered() throws IOException {
if (TestR4Config.getMaxThreads() == 1) {
return; // See class javadoc for explanation
}
ourRestServer.getInterceptorService().registerInterceptor(myOverridePathBasedReferentialIntegrityForDeletesInterceptor);
try {
@ -221,8 +237,8 @@ public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
HttpDelete delete = new HttpDelete(ourServerBase + "/" + myPatientId.getValue() + "?" + Constants.PARAMETER_CASCADE_DELETE + "=" + Constants.CASCADE_DELETE + "&_pretty=true");
delete.addHeader(Constants.HEADER_ACCEPT, Constants.CT_FHIR_JSON_NEW);
try (CloseableHttpResponse response = ourHttpClient.execute(delete)) {
assertEquals(200, response.getStatusLine().getStatusCode());
String deleteResponse = IOUtils.toString(response.getEntity().getContent(), Charsets.UTF_8);
assertEquals(200, response.getStatusLine().getStatusCode(), deleteResponse);
ourLog.info("Response: {}", deleteResponse);
assertThat(deleteResponse, containsString("Cascaded delete to "));
}
@ -284,6 +300,10 @@ public class CascadingDeleteInterceptorTest extends BaseResourceProviderR4Test {
@Test
public void testDeleteCascadingByHeader() throws IOException {
if (TestR4Config.getMaxThreads() == 1) {
return; // See class javadoc for explanation
}
createResources();
ourRestServer.getInterceptorService().registerInterceptor(myDeleteInterceptor);

View File

@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
import ca.uhn.fhir.jpa.dao.data.ISearchDao;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
import ca.uhn.fhir.jpa.model.entity.NormalizedQuantitySearchLevel;
@ -83,6 +84,7 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
public void afterDisableExpunge() {
myDaoConfig.setExpungeEnabled(new DaoConfig().isExpungeEnabled());
myDaoConfig.setAllowMultipleDelete(new DaoConfig().isAllowMultipleDelete());
myDaoConfig.setTagStorageMode(new DaoConfig().getTagStorageMode());
myModelConfig.setNormalizedQuantitySearchLevel(NormalizedQuantitySearchLevel.NORMALIZED_QUANTITY_SEARCH_NOT_SUPPORTED);
ourRestServer.getInterceptorService().unregisterInterceptorsIf(t -> t instanceof CascadingDeleteInterceptor);
@ -588,6 +590,28 @@ public class ExpungeR4Test extends BaseResourceProviderR4Test {
assertExpunged(myDeletedObservationId);
}
/**
 * Verify that a type-level expunge (resource type given, no specific ID) removes
 * deleted resources when tag storage mode is NON_VERSIONED, i.e. when tags are
 * stored once per resource rather than once per version.
 */
@Test
public void testExpungeByTypeAndNoId_NonVersionedTags() {
// Switch to non-versioned tag storage before creating the tagged resource
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.NON_VERSIONED);
// Create a tagged patient (version 1), then delete it (creates version 2)
Patient p = new Patient();
p.setId("PT-DELETED");
p.getMeta().addTag().setSystem("http://foo").setCode("bar");
p.setActive(true);
myDeletedPatientId = myPatientDao.update(p).getId();
myDeletedPatientId = myPatientDao.delete(myDeletedPatientId).getId();
// Capture SQL so failures can be diagnosed from the logged queries
myCaptureQueriesListener.clear();
myExpungeService.expunge("Patient", null, new ExpungeOptions().setExpungeDeletedResources(true), null);
myCaptureQueriesListener.logAllQueries();
// The delete produced version 2; that version must now be gone
assertExpunged(myDeletedPatientId.withVersion("2"));
}
@Autowired
private ExpungeService myExpungeService;
@Test
public void testExpungeDeletedWhereResourceInSearchResults() {
createStandardPatients();

View File

@ -690,7 +690,7 @@ public class MultitenantServerR4Test extends BaseMultitenantResourceProviderR4Te
}
@BeforeEach
private void setBulkDataExportProvider() {
public void setBulkDataExportProvider() {
ourRestServer.registerProvider(myProvider);
}

View File

@ -23,6 +23,7 @@ import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Subscription;
import org.jetbrains.annotations.NotNull;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
@ -115,12 +116,25 @@ public abstract class BaseSubscriptionsR4Test extends BaseResourceProviderR4Test
}
protected Subscription createSubscription(String theCriteria, String thePayload, Extension theExtension) {
String id = null;
return createSubscription(theCriteria, thePayload, theExtension, id);
}
@NotNull
protected Subscription createSubscription(String theCriteria, String thePayload, Extension theExtension, String id) {
Subscription subscription = newSubscription(theCriteria, thePayload);
if (theExtension != null) {
subscription.getChannel().addExtension(theExtension);
}
MethodOutcome methodOutcome = myClient.create().resource(subscription).execute();
MethodOutcome methodOutcome;
if (id != null) {
subscription.setId(id);
methodOutcome = myClient.update().resource(subscription).execute();
} else {
methodOutcome = myClient.create().resource(subscription).execute();
}
subscription.setId(methodOutcome.getId().getIdPart());
mySubscriptionIds.add(methodOutcome.getId());

View File

@ -68,6 +68,37 @@ public class RestHookTestR4Test extends BaseSubscriptionsR4Test {
myDaoConfig.setTriggerSubscriptionsForNonVersioningChanges(new DaoConfig().isTriggerSubscriptionsForNonVersioningChanges());
}
/**
 * Make sure that if we delete a subscription and then reinstate it under the same ID
 * with a criteria that changes the subscription's matching mode (DATABASE vs
 * IN_MEMORY), we don't end up storing both mode tags on the resource.
 */
@Test
public void testReuseSubscriptionIdWithDifferentDatabaseMode() throws Exception {
myDaoConfig.setTagStorageMode(DaoConfig.TagStorageModeEnum.NON_VERSIONED);
// This criteria uses _has, which can't be evaluated in memory, so the
// subscription should be tagged with the DATABASE matching mode
String payload = "application/fhir+json";
IdType id = createSubscription("Observation?_has:DiagnosticReport:result:identifier=foo|IDENTIFIER", payload, null, "sub").getIdElement().toUnqualifiedVersionless();
waitForActivatedSubscriptionCount(1);
Subscription subscription = mySubscriptionDao.read(id, mySrd);
assertEquals(1, subscription.getMeta().getTag().size());
assertEquals("DATABASE", subscription.getMeta().getTag().get(0).getCode());
// Delete the subscription, then recreate it with the same ID but a simple
// criteria that can be matched in memory
mySubscriptionDao.delete(id, mySrd);
waitForActivatedSubscriptionCount(0);
payload = "application/fhir+json";
id = createSubscription("Observation?", payload, null, "sub").getIdElement().toUnqualifiedVersionless();
waitForActivatedSubscriptionCount(1);
// Only the IN_MEMORY tag should be present — the old DATABASE tag must not linger
subscription = mySubscriptionDao.read(id, mySrd);
assertEquals(1, subscription.getMeta().getTag().size());
assertEquals("IN_MEMORY", subscription.getMeta().getTag().get(0).getCode());
}
@Test
public void testRestHookSubscriptionApplicationFhirJson() throws Exception {
String payload = "application/fhir+json";

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -82,9 +82,9 @@ public class FhirResourceDaoR5ValueSetMultiVersionTest extends BaseJpaR5Test {
public void testCreateVersionedValueSets() {
Map<ValueSetVersions, DaoMethodOutcome> myValueSets = createVersionedValueSets();
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
Optional<TermValueSet> optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
Optional<TermValueSet> optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
assertTrue(optionalTermValueSet.isPresent());
Long nullVersion_resid = ((ResourceTable)myValueSets.get(ValueSetVersions.NULL).getEntity()).getId();
assertNotNull(nullVersion_resid);
@ -92,7 +92,7 @@ public class FhirResourceDaoR5ValueSetMultiVersionTest extends BaseJpaR5Test {
assertEquals(nullVersion_resid, optionalTermValueSet.get().getResource().getId());
assertEquals("ValueSet_noVersion", optionalTermValueSet.get().getName());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1"));
assertTrue(optionalTermValueSet.isPresent());
Long v1Version_resid = ((ResourceTable)myValueSets.get(ValueSetVersions.V1).getEntity()).getId();
assertNotNull(v1Version_resid);
@ -100,7 +100,7 @@ public class FhirResourceDaoR5ValueSetMultiVersionTest extends BaseJpaR5Test {
assertEquals(v1Version_resid, optionalTermValueSet.get().getResource().getId());
assertEquals("ValueSet_v1", optionalTermValueSet.get().getName());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2"));
assertTrue(optionalTermValueSet.isPresent());
Long v2Version_resid = ((ResourceTable)myValueSets.get(ValueSetVersions.V2).getEntity()).getId();
assertNotNull(v2Version_resid);
@ -114,15 +114,15 @@ public class FhirResourceDaoR5ValueSetMultiVersionTest extends BaseJpaR5Test {
public void testUpdateVersionedValueSets() {
Map<ValueSetVersions, DaoMethodOutcome> myValueSets = createVersionedValueSets();
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
TermValueSet termValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version"));
TermValueSet termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version")));
assertEquals("ValueSet_noVersion", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertEquals("ValueSet_v1", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertEquals("ValueSet_v2", termValueSet.getName());
// Update ValueSets
@ -142,21 +142,21 @@ public class FhirResourceDaoR5ValueSetMultiVersionTest extends BaseJpaR5Test {
Long v2Version_resid = ((ResourceTable)v2Version_update_outcome.getEntity()).getId();
// Verify that ValueSets were updated.
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version")));
assertNotNull(nullVersion_resid);
assertNotNull(termValueSet.getResource());
assertEquals(nullVersion_resid, termValueSet.getResource().getId());
assertEquals("ValueSet_noVersion_updated", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertNotNull(v1Version_resid);
assertNotNull(termValueSet.getResource());
assertEquals(v1Version_resid, termValueSet.getResource().getId());
assertEquals("ValueSet_v1_updated", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertNotNull(v2Version_resid);
assertNotNull(termValueSet.getResource());
assertEquals(v2Version_resid, termValueSet.getResource().getId());
@ -168,50 +168,50 @@ public class FhirResourceDaoR5ValueSetMultiVersionTest extends BaseJpaR5Test {
public void testDeleteVersionedValueSets() {
Map<ValueSetVersions, DaoMethodOutcome> myValueSets = createVersionedValueSets();
assertEquals(3, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
assertEquals(3, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
TermValueSet termValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version"));
TermValueSet termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET).orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " with null version")));
assertEquals("ValueSet_noVersion", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertEquals("ValueSet_v1", termValueSet.getName());
termValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"));
termValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertEquals("ValueSet_v2", termValueSet.getName());
// Delete ValueSets
myValueSetDao.delete(myValueSets.get(ValueSetVersions.NULL).getResource().getIdElement());
assertEquals(2, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
Optional<TermValueSet> optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
assertEquals(2, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
Optional<TermValueSet> optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
if (optionalTermValueSet.isPresent()) {
fail();
}
assertNotNull(myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1")));
assertNotNull(myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertNotNull(runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v1"))));
assertNotNull(runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"))));
myValueSetDao.delete(myValueSets.get(ValueSetVersions.V1).getResource().getIdElement());
assertEquals(1, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
assertEquals(1, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
if (optionalTermValueSet.isPresent()) {
fail();
}
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1"));
if (optionalTermValueSet.isPresent()) {
fail();
}
assertNotNull(myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2")));
assertNotNull(runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2").orElseThrow(() -> new IllegalArgumentException("No TerValueSet found for " + URL_MY_VALUE_SET + " version v2"))));
myValueSetDao.delete(myValueSets.get(ValueSetVersions.V2).getResource().getIdElement());
assertEquals(0, myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size());
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET);
assertEquals(0, runInTransaction(()->myTermValueSetDao.findTermValueSetByUrl(PageRequest.of(0, 10), URL_MY_VALUE_SET).size()));
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndNullVersion(URL_MY_VALUE_SET));
if (optionalTermValueSet.isPresent()) {
fail();
}
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v1"));
if (optionalTermValueSet.isPresent()) {
fail();
}
optionalTermValueSet = myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2");
optionalTermValueSet = runInTransaction(()->myTermValueSetDao.findTermValueSetByUrlAndVersion(URL_MY_VALUE_SET, "v2"));
if (optionalTermValueSet.isPresent()) {
fail();
}

View File

@ -303,7 +303,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide
loadAndPersistCodeSystemAndValueSet();
await().until(() -> clearDeferredStorageQueue());
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
Slice<TermValueSet> page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED);
Slice<TermValueSet> page = runInTransaction(()->myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED));
assertEquals(2, page.getContent().size());
// Verify v1 ValueSet
@ -400,7 +400,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide
await().until(() -> clearDeferredStorageQueue());
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
Slice<TermValueSet> page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED);
Slice<TermValueSet> page = runInTransaction(()->myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED));
assertEquals(2, page.getContent().size());
// Validate ValueSet v1
@ -568,7 +568,7 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide
await().until(() -> clearDeferredStorageQueue());
myTermSvc.preExpandDeferredValueSetsToTerminologyTables();
Slice<TermValueSet> page = myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED);
Slice<TermValueSet> page = runInTransaction(()->myTermValueSetDao.findByExpansionStatus(PageRequest.of(0, 10), TermValueSetPreExpansionStatusEnum.EXPANDED));
assertEquals(2, page.getContent().size());
// Check expansion of multi-versioned ValueSet with version 1

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,5 +1,25 @@
package ca.uhn.fhir.jpa.test.config;
/*-
* #%L
* HAPI FHIR JPA Server Test Utilities
* %%
* Copyright (C) 2014 - 2022 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import net.ttddyy.dsproxy.ExecutionInfo;
import net.ttddyy.dsproxy.QueryInfo;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -2,11 +2,13 @@ package ca.uhn.fhirtest.config;
import ca.uhn.fhir.batch2.jobs.config.Batch2JobsConfig;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.api.config.ThreadPoolFactoryConfig;
import ca.uhn.fhir.jpa.batch2.JpaBatch2Config;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.match.config.WebsocketDispatcherConfig;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
import ca.uhn.fhir.rest.server.interceptor.IServerInterceptor;
import ca.uhn.fhir.rest.server.interceptor.LoggingInterceptor;
@ -23,7 +25,8 @@ import org.springframework.context.annotation.Import;
SubscriptionProcessorConfig.class,
SubscriptionSubmitterConfig.class,
JpaBatch2Config.class,
Batch2JobsConfig.class
Batch2JobsConfig.class,
ThreadPoolFactoryConfig.class
})
public class CommonConfig {
@ -53,6 +56,11 @@ public class CommonConfig {
return retVal;
}
/**
 * Spring bean supplying the {@link IEmailSender} used for subscription email delivery
 * in this test/demo configuration. Returns a {@link LoggingEmailSender}, which only
 * logs outgoing messages — no real email is ever sent from this server.
 */
@Bean
public IEmailSender emailSender() {
return new LoggingEmailSender();
}
/**
* This is a joke
* <p>

View File

@ -0,0 +1,15 @@
package ca.uhn.fhirtest.config;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.EmailDetails;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * No-op {@link IEmailSender} for the public test server: instead of dispatching
 * subscription notification emails, it logs the intended recipient and discards
 * the message. Registered as the {@code IEmailSender} bean in {@code CommonConfig}.
 */
public class LoggingEmailSender implements IEmailSender {
private static final Logger ourLog = LoggerFactory.getLogger(LoggingEmailSender.class);
@Override
public void send(EmailDetails theDetails) {
// Deliberately do not send anything — only record who would have received the email.
ourLog.info("Not sending subscription email to: {}", theDetails.getTo());
}
}

View File

@ -122,7 +122,7 @@ public class TestDstu2Config {
@Primary
@Bean
public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;

View File

@ -189,7 +189,7 @@ public class TestDstu3Config {
@Bean
@Primary
public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;

View File

@ -183,7 +183,7 @@ public class TestR4Config {
@Bean
@Primary
public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;

View File

@ -190,7 +190,7 @@ public class TestR5Config {
@Bean
@Primary
public JpaTransactionManager hapiTransactionManager(EntityManagerFactory entityManagerFactory) {
public JpaTransactionManager transactionManager(EntityManagerFactory entityManagerFactory) {
JpaTransactionManager retVal = new JpaTransactionManager();
retVal.setEntityManagerFactory(entityManagerFactory);
return retVal;

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -34,6 +34,7 @@ import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import javax.annotation.Nonnull;
import java.util.HashSet;
@ -42,6 +43,7 @@ import java.util.Set;
import java.util.stream.Collectors;
@Service
@Transactional
public class MdmLinkExpandSvc implements IMdmLinkExpandSvc {
private static final Logger ourLog = Logs.getMdmTroubleshootingLog();

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -191,7 +191,7 @@ public class FhirAutoConfiguration {
@Bean
@Primary
public PlatformTransactionManager hapiTransactionManager() {
public PlatformTransactionManager transactionManager() {
return new JpaTransactionManager(emf);
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.2.0-PRE13-SNAPSHOT</version>
<version>6.2.0-PRE15-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<modelVersion>4.0.0</modelVersion>

Some files were not shown because too many files have changed in this diff Show More