Merge branch 'master' into 2973-CLI_option_help_fails

katie_smilecdr 2021-09-09 17:15:20 -04:00
commit e6be758d65
103 changed files with 883 additions and 308 deletions

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -3,14 +3,14 @@
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-bom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<packaging>pom</packaging>
<name>HAPI FHIR BOM</name>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-cli</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom</relativePath>
</parent>

View File

@ -30,6 +30,7 @@ import org.apache.commons.lang3.time.DateUtils;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -65,8 +66,8 @@ public class FhirServerConfig extends BaseJavaConfigDstu2 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("HAPI_PU");
retVal.setDataSource(myDataSource);
retVal.setJpaProperties(myJpaProperties);

View File

@ -30,6 +30,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -62,8 +63,8 @@ public class FhirServerConfigDstu3 extends BaseJavaConfigDstu3 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("HAPI_PU");
retVal.setDataSource(myDataSource);
retVal.setJpaProperties(myJpaProperties);

View File

@ -28,6 +28,7 @@ import ca.uhn.fhir.rest.server.interceptor.ResponseHighlighterInterceptor;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -60,8 +61,8 @@ public class FhirServerConfigR4 extends BaseJavaConfigR4 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("HAPI_PU");
retVal.setDataSource(myDataSource);
retVal.setJpaProperties(myJpaProperties);

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -0,0 +1,5 @@
---
type: fix
issue: 2962
jira: SMILE-720
title: "Added a new DaoConfig setting called `setElasticSearchIndexPrefix(String prefix)` which will cause Hibernate search to prefix all of its tables with the provided value."

View File

@ -0,0 +1,3 @@
---
type: fix
title: "Fixed a bug where two identical tags in parallel entries being created in a batch would fail."

View File

@ -11,7 +11,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -258,6 +258,11 @@ public class DaoConfig {
private boolean myAccountForDateIndexNulls;
private boolean myTriggerSubscriptionsForNonVersioningChanges;
/**
* @since 5.6.0
*/
private String myElasicSearchIndexPrefix;
/**
* @since 5.6.0
*/
@ -269,6 +274,7 @@ public class DaoConfig {
private Integer myBundleBatchPoolSize = DEFAULT_BUNDLE_BATCH_POOL_SIZE;
private Integer myBundleBatchMaxPoolSize = DEFAULT_BUNDLE_BATCH_MAX_POOL_SIZE;
/**
* Constructor
*/
@ -2643,7 +2649,29 @@ public class DaoConfig {
return retval;
}
public enum StoreMetaSourceInformationEnum {
/**
*
* Gets the prefix applied to any indexes created when interacting with elasticsearch. This will apply to fulltext search indexes
* and terminology expansion indexes.
*
* @since 5.6.0
*/
public String getElasticSearchIndexPrefix() {
return myElasicSearchIndexPrefix;
}
/**
*
* Sets a prefix for any indexes created when interacting with elasticsearch. This will apply to fulltext search indexes
* and terminology expansion indexes.
*
* @since 5.6.0
*/
public void setElasticSearchIndexPrefix(String thePrefix) {
myElasicSearchIndexPrefix = thePrefix;
}
public enum StoreMetaSourceInformationEnum {
NONE(false, false),
SOURCE_URI(true, false),
REQUEST_ID(false, true),

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -24,18 +24,23 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.QueryChunker;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Optional;
import java.util.stream.Collectors;
import static org.slf4j.LoggerFactory.getLogger;
@ -52,17 +57,19 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
DaoRegistry myDaoRegistry;
@Autowired
IResourceTableDao myResourceTableDao;
@Autowired
IdHelperService myIdHelperService;
@Override
@Nonnull
public ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap) {
public ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap) {
IFhirResourceDao<?> dao = myDaoRegistry.getResourceDao(theResourceName);
if (ourLog.isDebugEnabled()) {
ourLog.debug("About to retrieve version map for resource type: {}", theResourceName);
}
List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.allPartitions())).stream()
List<Long> matchingIds = dao.searchForIds(theSearchParamMap, new SystemRequestDetails().setRequestPartitionId(theRequestPartitionId)).stream()
.map(ResourcePersistentId::getIdAsLong)
.collect(Collectors.toList());
@ -74,4 +81,95 @@ public class ResourceVersionSvcDaoImpl implements IResourceVersionSvc {
return ResourceVersionMap.fromResourceTableEntities(allById);
}
/**
* Retrieves the latest versions for any resource ids that are found.
* Ids that are not found are simply omitted from the returned map.
* The map keys are the same values that were passed in, so the
* consumer can look up results using the ids it already has.
*
* This method should not throw, so it can safely be consumed in
* transactions.
*
* @param theRequestPartitionId - request partition id
* @param theIds - list of IIdTypes for resources of interest.
* @return a map from each found id to its ResourcePersistentId (including version)
*/
@Override
public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(RequestPartitionId theRequestPartitionId, List<IIdType> theIds) {
ResourcePersistentIdMap idToPID = new ResourcePersistentIdMap();
HashMap<String, List<IIdType>> resourceTypeToIds = new HashMap<>();
for (IIdType id : theIds) {
String resourceType = id.getResourceType();
if (!resourceTypeToIds.containsKey(resourceType)) {
resourceTypeToIds.put(resourceType, new ArrayList<>());
}
resourceTypeToIds.get(resourceType).add(id);
}
for (String resourceType : resourceTypeToIds.keySet()) {
ResourcePersistentIdMap idAndPID = getIdsOfExistingResources(theRequestPartitionId,
resourceTypeToIds.get(resourceType));
idToPID.putAll(idAndPID);
}
return idToPID;
}
/**
* Helper method to determine which of the given resources exist in the DB (without throwing).
* Returns a map containing an entry for every resource found; ids that are
* not found are simply omitted.
*
* @param theIds - list of IIdType ids (all for the same resource type)
* @return a map from each found id to its ResourcePersistentId
*/
private ResourcePersistentIdMap getIdsOfExistingResources(RequestPartitionId thePartitionId,
Collection<IIdType> theIds) {
// these are the found Ids that were in the db
ResourcePersistentIdMap retval = new ResourcePersistentIdMap();
if (theIds == null || theIds.isEmpty()) {
return retval;
}
List<ResourcePersistentId> resourcePersistentIds = myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId,
theIds.stream().collect(Collectors.toList()));
// we'll use this map to fetch pids that require versions
HashMap<Long, ResourcePersistentId> pidsToVersionToResourcePid = new HashMap<>();
// fill in our map
for (ResourcePersistentId pid : resourcePersistentIds) {
if (pid.getVersion() == null) {
pidsToVersionToResourcePid.put(pid.getIdAsLong(), pid);
}
Optional<IIdType> idOp = theIds.stream()
.filter(i -> i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart()))
.findFirst();
// this should always be present
// since it was passed in.
// but land of optionals...
idOp.ifPresent(id -> {
retval.put(id, pid);
});
}
// set any versions we don't already have
if (!pidsToVersionToResourcePid.isEmpty()) {
Collection<Object[]> resourceEntries = myResourceTableDao
.getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet()));
for (Object[] record : resourceEntries) {
// order matters!
Long retPid = (Long) record[0];
String resType = (String) record[1];
Long version = (Long) record[2];
pidsToVersionToResourcePid.get(retPid).setVersion(version);
}
}
return retval;
}
}
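A hedged usage sketch of the new lookup above (the ids and the injected myResourceVersionSvc field are illustrative; IResourceVersionSvc, ResourcePersistentIdMap, RequestPartitionId.allPartitions(), and IdDt all appear elsewhere in this change):

// Illustrative caller, assuming an @Autowired IResourceVersionSvc myResourceVersionSvc
List<IIdType> ids = Arrays.asList(new IdDt("Patient/123"), new IdDt("Observation/does-not-exist"));
ResourcePersistentIdMap versionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(
    RequestPartitionId.allPartitions(), ids);
for (IIdType id : ids) {
    if (versionMap.containsKey(id)) {
        // found: the ResourcePersistentId carries the latest version for this id
        Long version = versionMap.getResourcePersistentId(id).getVersion();
    } else {
        // not found: absent ids are simply omitted from the map, no exception is thrown
    }
}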

View File

@ -122,6 +122,7 @@ import ca.uhn.fhir.jpa.search.cache.DatabaseSearchCacheSvcImpl;
import ca.uhn.fhir.jpa.search.cache.DatabaseSearchResultCacheSvcImpl;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexer;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
@ -155,6 +156,7 @@ import org.hl7.fhir.utilities.npm.FilesystemPackageCacheManager;
import org.springframework.batch.core.configuration.annotation.BatchConfigurer;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@ -288,8 +290,8 @@ public abstract class BaseConfig {
* bean, but it provides a partially completed entity manager
* factory with HAPI FHIR customizations
*/
protected LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean();
protected LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory myConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(myConfigurableListableBeanFactory);
configureEntityManagerFactory(retVal, fhirContext());
return retVal;
}
@ -919,6 +921,11 @@ public abstract class BaseConfig {
return new PredicateBuilderFactory(theApplicationContext);
}
@Bean
public IndexNamePrefixLayoutStrategy indexLayoutStrategy() {
return new IndexNamePrefixLayoutStrategy();
}
@Bean
public JpaResourceLoader jpaResourceLoader() {
return new JpaResourceLoader();

View File

@ -23,6 +23,9 @@ package ca.uhn.fhir.jpa.config;
import org.hibernate.cfg.AvailableSettings;
import org.hibernate.query.criteria.LiteralHandlingMode;
import org.hibernate.resource.jdbc.spi.PhysicalConnectionHandlingMode;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.orm.hibernate5.SpringBeanContainer;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import java.util.Map;
@ -32,6 +35,14 @@ import java.util.Map;
* that sets some sensible default property values
*/
public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContainerEntityManagerFactoryBean {
//https://stackoverflow.com/questions/57902388/how-to-inject-spring-beans-into-the-hibernate-envers-revisionlistener
ConfigurableListableBeanFactory myConfigurableListableBeanFactory;
public HapiFhirLocalContainerEntityManagerFactoryBean(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
myConfigurableListableBeanFactory = theConfigurableListableBeanFactory;
}
@Override
public Map<String, Object> getJpaPropertyMap() {
Map<String, Object> retVal = super.getJpaPropertyMap();
@ -63,6 +74,11 @@ public class HapiFhirLocalContainerEntityManagerFactoryBean extends LocalContain
if (!retVal.containsKey(AvailableSettings.BATCH_VERSIONED_DATA)) {
retVal.put(AvailableSettings.BATCH_VERSIONED_DATA, "true");
}
// Why is this here, you ask? LocalContainerEntityManagerFactoryBean actually clobbers the setting hibernate needs
// in order to be able to resolve beans, so we add it back in manually here
if (!retVal.containsKey(AvailableSettings.BEAN_CONTAINER)) {
retVal.put(AvailableSettings.BEAN_CONTAINER, new SpringBeanContainer(myConfigurableListableBeanFactory));
}
return retVal;
}
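The practical effect of restoring AvailableSettings.BEAN_CONTAINER, as a hedged sketch: classes that Hibernate or Hibernate Search instantiate by name can now be resolved through the Spring context, so their @Autowired dependencies are satisfied. IndexNamePrefixLayoutStrategy (added elsewhere in this commit) relies on exactly this; the class below is a simplified, hypothetical analogue:

// Hypothetical analogue: Hibernate Search is pointed at this class by name (a *.class.getName()
// backend property). With SpringBeanContainer registered, the instance is obtained through the
// Spring bean factory, so the @Autowired DaoConfig is populated rather than left null.
public class MySpringResolvedComponent {

    @Autowired
    private DaoConfig myDaoConfig;

    public String indexPrefix() {
        return myDaoConfig.getElasticSearchIndexPrefix();
    }
}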

View File

@ -30,7 +30,8 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@ -94,7 +95,7 @@ public abstract class BaseStorageDao {
@Autowired
protected ModelConfig myModelConfig;
@Autowired
protected IdHelperService myIdHelperService;
protected IResourceVersionSvc myResourceVersionSvc;
@Autowired
protected DaoConfig myDaoConfig;
@ -211,7 +212,7 @@ public abstract class BaseStorageDao {
IIdType referenceElement = nextReference.getReferenceElement();
if (!referenceElement.hasBaseUrl()) {
Map<IIdType, ResourcePersistentId> idToPID = myIdHelperService.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
Collections.singletonList(referenceElement)
);
@ -220,12 +221,11 @@ public abstract class BaseStorageDao {
// 2) no resource exists, but we will create one (eventually). The version is 1
// 3) no resource exists, and none will be made -> throw
Long version;
if (idToPID.containsKey(referenceElement)) {
if (resourceVersionMap.containsKey(referenceElement)) {
// the resource exists... latest id
// will be the value in the ResourcePersistentId
version = idToPID.get(referenceElement).getVersion();
}
else if (myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
version = resourceVersionMap.getResourcePersistentId(referenceElement).getVersion();
} else if (myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
// if idToPID doesn't contain object
// but autcreateplaceholders is on
// then the version will be 1 (the first version)

View File

@ -35,7 +35,8 @@ import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.api.model.DeleteConflict;
import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
@ -54,7 +55,6 @@ import ca.uhn.fhir.rest.api.PreferReturnEnum;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.DeferredInterceptorBroadcasts;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
@ -160,7 +160,7 @@ public abstract class BaseTransactionProcessor {
private TaskExecutor myExecutor ;
@Autowired
private IdHelperService myIdHelperService;
private IResourceVersionSvc myResourceVersionSvc;
@VisibleForTesting
public void setDaoConfig(DaoConfig theDaoConfig) {
@ -366,8 +366,8 @@ public abstract class BaseTransactionProcessor {
IBase nextRequestEntry = null;
for (int i=0; i<requestEntriesSize; i++ ) {
nextRequestEntry = requestEntries.get(i);
BundleTask bundleTask = new BundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode);
getTaskExecutor().execute(bundleTask);
RetriableBundleTask retriableBundleTask = new RetriableBundleTask(completionLatch, theRequestDetails, responseMap, i, nextRequestEntry, theNestedMode);
getTaskExecutor().execute(retriableBundleTask);
}
// waiting for all tasks to be completed
@ -1271,25 +1271,24 @@ public abstract class BaseTransactionProcessor {
// get a map of
// existing ids -> PID (for resources that exist in the DB)
// should this be allPartitions?
Map<IIdType, ResourcePersistentId> idToPID = myIdHelperService.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
ResourcePersistentIdMap resourceVersionMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
theReferencesToAutoVersion.stream()
.map(ref -> ref.getReferenceElement()).collect(Collectors.toList()));
.map(IBaseReference::getReferenceElement).collect(Collectors.toList()));
for (IBaseReference baseRef : theReferencesToAutoVersion) {
IIdType id = baseRef.getReferenceElement();
if (!idToPID.containsKey(id)
&& myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
if (!resourceVersionMap.containsKey(id)
&& myDaoConfig.isAutoCreatePlaceholderReferenceTargets()) {
// not in the db, but autocreateplaceholders is true
// so the version we'll set is "1" (since it will be
// created later)
String newRef = id.withVersion("1").getValue();
id.setValue(newRef);
}
else {
} else {
// we will add the looked up info to the transaction
// for later
theTransactionDetails.addResolvedResourceId(id,
idToPID.get(id));
resourceVersionMap.getResourcePersistentId(id));
}
}
@ -1684,59 +1683,81 @@ public abstract class BaseTransactionProcessor {
return theStatusCode + " " + defaultString(Constants.HTTP_STATUS_NAMES.get(theStatusCode));
}
public class BundleTask implements Runnable {
public class RetriableBundleTask implements Runnable {
private CountDownLatch myCompletedLatch;
private RequestDetails myRequestDetails;
private IBase myNextReqEntry;
private Map<Integer, Object> myResponseMap;
private int myResponseOrder;
private boolean myNestedMode;
protected BundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) {
private final CountDownLatch myCompletedLatch;
private final RequestDetails myRequestDetails;
private final IBase myNextReqEntry;
private final Map<Integer, Object> myResponseMap;
private final int myResponseOrder;
private final boolean myNestedMode;
private BaseServerResponseException myLastSeenException;
protected RetriableBundleTask(CountDownLatch theCompletedLatch, RequestDetails theRequestDetails, Map<Integer, Object> theResponseMap, int theResponseOrder, IBase theNextReqEntry, boolean theNestedMode) {
this.myCompletedLatch = theCompletedLatch;
this.myRequestDetails = theRequestDetails;
this.myNextReqEntry = theNextReqEntry;
this.myResponseMap = theResponseMap;
this.myResponseOrder = theResponseOrder;
this.myResponseMap = theResponseMap;
this.myResponseOrder = theResponseOrder;
this.myNestedMode = theNestedMode;
this.myLastSeenException = null;
}
private void processBatchEntry() {
IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
myVersionAdapter.addEntry(subRequestBundle, myNextReqEntry);
IBaseBundle nextResponseBundle = processTransactionAsSubRequest(myRequestDetails, subRequestBundle, "Batch sub-request", myNestedMode);
IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myResponseMap.put(myResponseOrder, subResponseEntry);
/*
* If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it
*/
if (myVersionAdapter.getResource(subResponseEntry) == null) {
IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myResponseMap.put(myResponseOrder, nextResponseBundleFirstEntry);
}
}
private boolean processBatchEntryWithRetry() {
int maxAttempts = 3;
for (int attempt = 1;; attempt++) {
try {
processBatchEntry();
return true;
} catch (BaseServerResponseException e) {
//If we catch a known and structured exception from HAPI, just fail.
myLastSeenException = e;
return false;
} catch (Throwable t) {
myLastSeenException = new InternalErrorException(t);
//If we have caught a non-tag-storage failure we are unfamiliar with, or we have exceeded max attempts, exit.
if (!DaoFailureUtil.isTagStorageFailure(t) || attempt >= maxAttempts) {
ourLog.error("Failure during BATCH sub transaction processing", t);
return false;
}
}
}
}
@Override
public void run() {
BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
try {
IBaseBundle subRequestBundle = myVersionAdapter.createBundle(org.hl7.fhir.r4.model.Bundle.BundleType.TRANSACTION.toCode());
myVersionAdapter.addEntry(subRequestBundle, (IBase) myNextReqEntry);
IBaseBundle nextResponseBundle = processTransactionAsSubRequest(myRequestDetails, subRequestBundle, "Batch sub-request", myNestedMode);
IBase subResponseEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myResponseMap.put(myResponseOrder, subResponseEntry);
/*
* If the individual entry didn't have a resource in its response, bring the sub-transaction's OperationOutcome across so the client can see it
*/
if (myVersionAdapter.getResource(subResponseEntry) == null) {
IBase nextResponseBundleFirstEntry = (IBase) myVersionAdapter.getEntries(nextResponseBundle).get(0);
myResponseMap.put(myResponseOrder, nextResponseBundleFirstEntry);
}
} catch (BaseServerResponseException e) {
caughtEx.setException(e);
} catch (Throwable t) {
ourLog.error("Failure during BATCH sub transaction processing", t);
caughtEx.setException(new InternalErrorException(t));
boolean success = processBatchEntryWithRetry();
if (!success) {
populateResponseMapWithLastSeenException();
}
if (caughtEx.getException() != null) {
// add exception to the response map
myResponseMap.put(myResponseOrder, caughtEx);
}
// checking for the parallelism
ourLog.debug("processing bacth for {} is completed", myVersionAdapter.getEntryRequestUrl((IBase)myNextReqEntry));
ourLog.debug("processing batch for {} is completed", myVersionAdapter.getEntryRequestUrl(myNextReqEntry));
myCompletedLatch.countDown();
}
private void populateResponseMapWithLastSeenException() {
BaseServerResponseExceptionHolder caughtEx = new BaseServerResponseExceptionHolder();
caughtEx.setException(myLastSeenException);
myResponseMap.put(myResponseOrder, caughtEx);
}
}
}

View File

@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.dao;
import org.apache.commons.lang3.StringUtils;
/**
* Utility class to help identify classes of failure.
*/
public class DaoFailureUtil {
public static boolean isTagStorageFailure(Throwable t) {
if (StringUtils.isBlank(t.getMessage())) {
return false;
} else {
String msg = t.getMessage().toLowerCase();
return msg.contains("hfj_tag_def") || msg.contains("hfj_res_tag");
}
}
}
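A small hedged example of the intended use (the exception message is made up; the retry count of 3 mirrors the usage in HapiTransactionService and RetriableBundleTask elsewhere in this commit):

// Illustrative only: constraint failures on HFJ_TAG_DEF / HFJ_RES_TAG are treated as retryable
Throwable failure = new RuntimeException("could not execute statement; constraint [hfj_tag_def]");
int maxRetries = DaoFailureUtil.isTagStorageFailure(failure) ? 3 : 0;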

View File

@ -303,7 +303,7 @@ public class IdHelperService {
if (forcedId.isPresent()) {
retVal.setValue(theResourceType + '/' + forcedId.get());
} else {
retVal.setValue(theResourceType + '/' + theId.toString());
retVal.setValue(theResourceType + '/' + theId);
}
return retVal;
@ -530,95 +530,6 @@ public class IdHelperService {
return retVal;
}
/**
* Helper method to determine if some resources exist in the DB (without throwing).
* Returns a set that contains the IIdType for every resource found.
* If it's not found, it won't be included in the set.
* @param theIds - list of IIdType ids (for the same resource)
* @return
*/
private Map<IIdType, ResourcePersistentId> getIdsOfExistingResources(RequestPartitionId thePartitionId,
Collection<IIdType> theIds) {
// these are the found Ids that were in the db
HashMap<IIdType, ResourcePersistentId> collected = new HashMap<>();
if (theIds == null || theIds.isEmpty()) {
return collected;
}
List<ResourcePersistentId> resourcePersistentIds = resolveResourcePersistentIdsWithCache(thePartitionId,
theIds.stream().collect(Collectors.toList()));
// we'll use this map to fetch pids that require versions
HashMap<Long, ResourcePersistentId> pidsToVersionToResourcePid = new HashMap<>();
// fill in our map
for (ResourcePersistentId pid : resourcePersistentIds) {
if (pid.getVersion() == null) {
pidsToVersionToResourcePid.put(pid.getIdAsLong(), pid);
}
Optional<IIdType> idOp = theIds.stream()
.filter(i -> i.getIdPart().equals(pid.getAssociatedResourceId().getIdPart()))
.findFirst();
// this should always be present
// since it was passed in.
// but land of optionals...
idOp.ifPresent(id -> {
collected.put(id, pid);
});
}
// set any versions we don't already have
if (!pidsToVersionToResourcePid.isEmpty()) {
Collection<Object[]> resourceEntries = myResourceTableDao
.getResourceVersionsForPid(new ArrayList<>(pidsToVersionToResourcePid.keySet()));
for (Object[] record : resourceEntries) {
// order matters!
Long retPid = (Long)record[0];
String resType = (String)record[1];
Long version = (Long)record[2];
pidsToVersionToResourcePid.get(retPid).setVersion(version);
}
}
return collected;
}
/**
* Retrieves the latest versions for any resourceid that are found.
* If they are not found, they will not be contained in the returned map.
* The key should be the same value that was passed in to allow
* consumer to look up the value using the id they already have.
*
* This method should not throw, so it can safely be consumed in
* transactions.
*
* @param theRequestPartitionId - request partition id
* @param theIds - list of IIdTypes for resources of interest.
* @return
*/
public Map<IIdType, ResourcePersistentId> getLatestVersionIdsForResourceIds(RequestPartitionId theRequestPartitionId, Collection<IIdType> theIds) {
HashMap<IIdType, ResourcePersistentId> idToPID = new HashMap<>();
HashMap<String, List<IIdType>> resourceTypeToIds = new HashMap<>();
for (IIdType id : theIds) {
String resourceType = id.getResourceType();
if (!resourceTypeToIds.containsKey(resourceType)) {
resourceTypeToIds.put(resourceType, new ArrayList<>());
}
resourceTypeToIds.get(resourceType).add(id);
}
for (String resourceType : resourceTypeToIds.keySet()) {
Map<IIdType, ResourcePersistentId> idAndPID = getIdsOfExistingResources(theRequestPartitionId,
resourceTypeToIds.get(resourceType));
idToPID.putAll(idAndPID);
}
return idToPID;
}
/**
* @deprecated This method doesn't take a partition ID as input, so it is unsafe. It
* should be reworked to include the partition ID before any new use is incorporated

View File

@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.model.ResourceVersionConflictResolutionStrategy;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.DaoFailureUtil;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
@ -93,10 +94,9 @@ public class HapiTransactionService {
* known to the system already, they'll both try to create a row in HFJ_TAG_DEF,
* which is the tag definition table. In that case, a constraint error will be
* thrown by one of the client threads, so we auto-retry in order to avoid
* annopying spurious failures for the client.
* annoying spurious failures for the client.
*/
if (e.getMessage().contains("HFJ_TAG_DEF") || e.getMessage().contains("hfj_tag_def") ||
e.getMessage().contains("HFJ_RES_TAG") || e.getMessage().contains("hfj_res_tag")) {
if (DaoFailureUtil.isTagStorageFailure(e)) {
maxRetries = 3;
}

View File

@ -99,6 +99,9 @@ public class ElasticsearchHibernatePropertiesBuilder {
theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy);
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog));
//This tells elasticsearch to use our custom index naming strategy.
theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());
injectStartupTemplate(myProtocol, myRestUrl, myUsername, myPassword);
}
@ -149,7 +152,7 @@ public class ElasticsearchHibernatePropertiesBuilder {
*/
void injectStartupTemplate(String theProtocol, String theHostAndPort, String theUsername, String thePassword) {
PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
.patterns(Arrays.asList("resourcetable-*", "termconcept-*"))
.patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
.settings(Settings.builder().put("index.max_ngram_diff", 50));
int colonIndex = theHostAndPort.indexOf(":");

View File

@ -0,0 +1,99 @@
package ca.uhn.fhir.jpa.search.elastic;
/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import org.apache.commons.lang3.StringUtils;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.backend.elasticsearch.logging.impl.Log;
import org.hibernate.search.util.common.logging.impl.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.lang.invoke.MethodHandles;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* This class instructs hibernate search on how to create index names for indexed entities.
* In our case, we use this class to add an optional prefix to all indices which are created, which can be controlled via
* {@link DaoConfig#setElasticSearchIndexPrefix(String)}.
*/
@Service
public class IndexNamePrefixLayoutStrategy implements IndexLayoutStrategy {
@Autowired
private DaoConfig myDaoConfig;
static final Log log = LoggerFactory.make(Log.class, MethodHandles.lookup());
public static final String NAME = "prefix";
public static final Pattern UNIQUE_KEY_EXTRACTION_PATTERN = Pattern.compile("(.*)-\\d{6}");
public String createInitialElasticsearchIndexName(String hibernateSearchIndexName) {
return addPrefixIfNecessary(hibernateSearchIndexName + "-000001");
}
public String createWriteAlias(String hibernateSearchIndexName) {
return addPrefixIfNecessary(hibernateSearchIndexName +"-write");
}
public String createReadAlias(String hibernateSearchIndexName) {
return addPrefixIfNecessary(hibernateSearchIndexName + "-read");
}
private String addPrefixIfNecessary(String theCandidateName) {
validateDaoConfigIsPresent();
if (!StringUtils.isBlank(myDaoConfig.getElasticSearchIndexPrefix())) {
return myDaoConfig.getElasticSearchIndexPrefix() + "-" + theCandidateName;
} else {
return theCandidateName;
}
}
public String extractUniqueKeyFromHibernateSearchIndexName(String hibernateSearchIndexName) {
return hibernateSearchIndexName;
}
public String extractUniqueKeyFromElasticsearchIndexName(String elasticsearchIndexName) {
Matcher matcher = UNIQUE_KEY_EXTRACTION_PATTERN.matcher(elasticsearchIndexName);
if (!matcher.matches()) {
throw log.invalidIndexPrimaryName(elasticsearchIndexName, UNIQUE_KEY_EXTRACTION_PATTERN);
} else {
String candidateUniqueKey = matcher.group(1);
return removePrefixIfNecessary(candidateUniqueKey);
}
}
private String removePrefixIfNecessary(String theCandidateUniqueKey) {
validateDaoConfigIsPresent();
if (!StringUtils.isBlank(myDaoConfig.getElasticSearchIndexPrefix())) {
return theCandidateUniqueKey.replace(myDaoConfig.getElasticSearchIndexPrefix() + "-", "");
} else {
return theCandidateUniqueKey;
}
}
private void validateDaoConfigIsPresent() {
if (myDaoConfig == null) {
throw new ConfigurationException("While attempting to boot HAPI FHIR, the Hibernate Search bootstrapper failed to find the DaoConfig. This probably means Hibernate Search has been recently upgraded, or somebody modified HapiFhirLocalContainerEntityManagerFactoryBean.");
}
}
}
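For orientation, a hedged test-style sketch of the names this strategy produces (the "hapi" prefix and the reflective field injection are for illustration only; in the server the DaoConfig is autowired):

IndexNamePrefixLayoutStrategy strategy = new IndexNamePrefixLayoutStrategy();
DaoConfig daoConfig = new DaoConfig();
daoConfig.setElasticSearchIndexPrefix("hapi");
// the strategy normally receives DaoConfig via @Autowired; inject it by hand here for illustration
org.springframework.test.util.ReflectionTestUtils.setField(strategy, "myDaoConfig", daoConfig);
strategy.createInitialElasticsearchIndexName("resourcetable"); // "hapi-resourcetable-000001"
strategy.createWriteAlias("resourcetable");                    // "hapi-resourcetable-write"
strategy.createReadAlias("resourcetable");                     // "hapi-resourcetable-read"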

View File

@ -21,12 +21,12 @@ public class ResourceVersionCacheSvcTest extends BaseJpaR4Test {
IIdType patientId = myPatientDao.create(patient).getId();
ResourceVersionMap versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
assertEquals(1, versionMap.size());
assertEquals("1", versionMap.getVersion(patientId));
assertEquals(1L, versionMap.getVersion(patientId));
patient.setGender(Enumerations.AdministrativeGender.MALE);
myPatientDao.update(patient);
versionMap = myResourceVersionCacheSvc.getVersionMap("Patient", SearchParameterMap.newSynchronous());
assertEquals(1, versionMap.size());
assertEquals("2", versionMap.getVersion(patientId));
assertEquals(2L, versionMap.getVersion(patientId));
}
}

View File

@ -0,0 +1,147 @@
package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.config.BlockLargeNumbersOfParamsListener;
import ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.dao.r4.ElasticsearchPrefixTest;
import ca.uhn.fhir.jpa.search.elastic.HapiElasticsearchAnalysisConfigurer;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.PutIndexTemplateRequest;
import org.elasticsearch.common.settings.Settings;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchBackendSettings;
import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings;
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import javax.sql.DataSource;
import java.io.IOException;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.TimeUnit;
/**
* The only reason this is its own class is so that we can set a dao config setting before the whole test framework comes online.
* We need to do this as it is during bean creation that HS bootstrapping occurs.
*/
@Configuration
public class ElasticsearchWithPrefixConfig {
@Bean
public DaoConfig daoConfig() {
DaoConfig daoConfig = new DaoConfig();
daoConfig.setElasticSearchIndexPrefix(ElasticsearchPrefixTest.ELASTIC_PREFIX);
return daoConfig;
}
@Bean
public IndexNamePrefixLayoutStrategy indexNamePrefixLayoutStrategy() {
return new IndexNamePrefixLayoutStrategy();
}
@Bean
public FhirContext fhirContext() {
return FhirContext.forR4();
}
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(theConfigurableListableBeanFactory);
retVal.setJpaDialect(new HapiFhirHibernateJpaDialect(fhirContext().getLocalizer()));
retVal.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());
return retVal;
}
@Bean
public DataSource dataSource() {
BasicDataSource retVal = new BasicDataSource();
retVal.setDriver(new org.h2.Driver());
retVal.setUrl("jdbc:h2:mem:testdb_r4");
retVal.setMaxWaitMillis(30000);
retVal.setUsername("");
retVal.setPassword("");
retVal.setMaxTotal(5);
SLF4JLogLevel level = SLF4JLogLevel.INFO;
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS, level)
.beforeQuery(new BlockLargeNumbersOfParamsListener())
.afterQuery(new CurrentThreadCaptureQueriesListener())
.build();
return dataSource;
}
@Bean
public Properties jpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
//Override default lucene settings
// Force elasticsearch to start first
int httpPort = elasticContainer().getMappedPort(9200);//9200 is the HTTP port
String host = elasticContainer().getHost();
// the below properties are used for ElasticSearch integration
extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), host + ":" + httpPort);
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), "http");
extraProperties.put(HibernateOrmMapperSettings.SCHEMA_MANAGEMENT_STRATEGY, SchemaManagementStrategyName.CREATE.externalRepresentation());
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS_WAIT_TIMEOUT), Long.toString(10000));
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS), IndexStatus.YELLOW.externalRepresentation());
// Need the mapping to be dynamic because of terminology indexes.
extraProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.DYNAMIC_MAPPING), "true");
// Only for unit tests
extraProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, "read-sync");
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(true));
//This tells elasticsearch to use our custom index naming strategy.
extraProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LAYOUT_STRATEGY), IndexNamePrefixLayoutStrategy.class.getName());
PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
.patterns(Arrays.asList("*resourcetable-*", "*termconcept-*"))
.settings(Settings.builder().put("index.max_ngram_diff", 50));
try {
RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient("http://" + host, httpPort, "", "");
AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
assert acknowledgedResponse.isAcknowledged();
} catch (IOException theE) {
theE.printStackTrace();
throw new ConfigurationException("Couldn't connect to the elasticsearch server to create necessary templates. Ensure the Elasticsearch user has permissions to create templates.");
}
return extraProperties;
}
@Bean
public ElasticsearchContainer elasticContainer() {
ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch();
embeddedElasticSearch.start();
return embeddedElasticSearch;
}
}

View File

@ -16,6 +16,7 @@ import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -134,8 +135,8 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu2");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -15,6 +15,7 @@ import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -138,8 +139,8 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -3,12 +3,14 @@ package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil;
import ca.uhn.fhir.jpa.subscription.channel.config.SubscriptionChannelConfig;
import ca.uhn.fhir.jpa.subscription.match.config.SubscriptionProcessorConfig;
import ca.uhn.fhir.jpa.subscription.match.deliver.resthook.SubscriptionDeliveringRestHookSubscriber;
import ca.uhn.fhir.jpa.subscription.submit.config.SubscriptionSubmitterConfig;
import ca.uhn.fhir.test.utilities.BatchJobHelper;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.springframework.batch.core.explore.JobExplorer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

View File

@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.batch.BatchJobsConfig;
import ca.uhn.fhir.jpa.batch.api.IBatchJobSubmitter;
import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
@ -14,6 +15,8 @@ import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -138,10 +141,15 @@ public class TestR4Config extends BaseJavaConfigR4 {
return new SingleQueryCountHolder();
}
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = new HapiFhirLocalContainerEntityManagerFactoryBean(theConfigurableListableBeanFactory);
configureEntityManagerFactory(retVal, fhirContext());
retVal.setJpaDialect(new HapiFhirHibernateJpaDialect(fhirContext().getLocalizer()));
retVal.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity");
retVal.setPersistenceProvider(new HibernatePersistenceProvider());
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -1,13 +1,19 @@
package ca.uhn.fhir.jpa.config;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
import ca.uhn.fhir.jpa.search.elastic.IndexNamePrefixLayoutStrategy;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
import org.h2.index.Index;
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import javax.annotation.PreDestroy;

View File

@ -3,9 +3,11 @@ package ca.uhn.fhir.jpa.config;
import java.util.Properties;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.search.backend.elasticsearch.index.layout.IndexLayoutStrategy;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
@ -27,8 +29,8 @@ public class TestR4WithLuceneDisabledConfig extends TestR4Config {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());
return retVal;

View File

@ -16,6 +16,7 @@ import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -138,8 +139,8 @@ public class TestR5Config extends BaseJavaConfigR5 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaR5");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -721,6 +721,4 @@ public abstract class BaseJpaTest extends BaseTest {
}
Thread.sleep(500);
}
}

View File

@ -5,6 +5,7 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.dao.r4.TransactionProcessorVersionAdapterR4;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
@ -70,6 +71,8 @@ public class TransactionProcessorTest {
private MatchUrlService myMatchUrlService;
@MockBean
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
@MockBean
private IResourceVersionSvc myResourceVersionSvc;
@MockBean(answer = Answers.RETURNS_DEEP_STUBS)
private SessionImpl mySession;

View File

@ -1,11 +1,12 @@
package ca.uhn.fhir.jpa.dao.index;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionSvcDaoImpl;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
@ -17,7 +18,6 @@ import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
import javax.persistence.EntityManager;
import javax.persistence.TypedQuery;
import javax.persistence.criteria.CriteriaBuilder;
import javax.persistence.criteria.CriteriaQuery;
@ -28,13 +28,14 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertSame;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.when;
@ExtendWith(MockitoExtension.class)
public class IdHelperServiceTest {
public class ResourceVersionSvcTest {
// helper class to package up data for helper methods
private class ResourceIdPackage {
@ -52,19 +53,15 @@ public class IdHelperServiceTest {
}
@Mock
private IResourceTableDao myResourceTableDao;
DaoRegistry myDaoRegistry;
@Mock
private DaoConfig myDaoConfig;
IResourceTableDao myResourceTableDao;
@Mock
private MemoryCacheService myMemoryCacheService;
@Mock
private EntityManager myEntityManager;
IdHelperService myIdHelperService;
// TODO KHS move the methods that use this out to a separate test class
@InjectMocks
private IdHelperService myIdHelperService;
private ResourceVersionSvcDaoImpl myResourceVersionSvc;
/**
* Gets a ResourceTable record for getResourceVersionsForPid
@ -92,20 +89,7 @@ public class IdHelperServiceTest {
Root<ForcedId> from = Mockito.mock(Root.class);
Path path = Mockito.mock(Path.class);
Mockito.when(cb.createQuery(Mockito.any(Class.class)))
.thenReturn(criteriaQuery);
Mockito.when(criteriaQuery.from(Mockito.any(Class.class)))
.thenReturn(from);
Mockito.when(from.get(Mockito.anyString()))
.thenReturn(path);
TypedQuery<ForcedId> queryMock = Mockito.mock(TypedQuery.class);
Mockito.when(queryMock.getResultList()).thenReturn(new ArrayList<>()); // not found
Mockito.when(myEntityManager.getCriteriaBuilder())
.thenReturn(cb);
Mockito.when(myEntityManager.createQuery(Mockito.any(CriteriaQuery.class)))
.thenReturn(queryMock);
}
/**
@ -130,15 +114,11 @@ public class IdHelperServiceTest {
ResourcePersistentId first = resourcePersistentIds.remove(0);
if (resourcePersistentIds.isEmpty()) {
Mockito.when(myMemoryCacheService.getIfPresent(Mockito.any(MemoryCacheService.CacheEnum.class), Mockito.anyString()))
.thenReturn(first).thenReturn(null);
when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(Collections.singletonList(first));
}
else {
Mockito.when(myMemoryCacheService.getIfPresent(Mockito.any(MemoryCacheService.CacheEnum.class), Mockito.anyString()))
.thenReturn(first, resourcePersistentIds.toArray());
when(myIdHelperService.resolveResourcePersistentIdsWithCache(any(), any())).thenReturn(resourcePersistentIds);
}
Mockito.when(myResourceTableDao.getResourceVersionsForPid(Mockito.anyList()))
.thenReturn(matches);
}
@Test
@ -154,7 +134,7 @@ public class IdHelperServiceTest {
mockReturnsFor_getIdsOfExistingResources(pack);
// test
Map<IIdType, ResourcePersistentId> retMap = myIdHelperService.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
Collections.singletonList(type));
Assertions.assertTrue(retMap.containsKey(type));
@ -169,7 +149,7 @@ public class IdHelperServiceTest {
mock_resolveResourcePersistentIdsWithCache_toReturnNothing();
// test
Map<IIdType, ResourcePersistentId> retMap = myIdHelperService.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(RequestPartitionId.allPartitions(),
Collections.singletonList(type));
Assertions.assertTrue(retMap.isEmpty());
@ -191,7 +171,7 @@ public class IdHelperServiceTest {
mockReturnsFor_getIdsOfExistingResources(pack);
// test
Map<IIdType, ResourcePersistentId> retMap = myIdHelperService.getLatestVersionIdsForResourceIds(
ResourcePersistentIdMap retMap = myResourceVersionSvc.getLatestVersionIdsForResourceIds(
RequestPartitionId.allPartitions(),
Arrays.asList(type, type2)
);

View File

@ -0,0 +1,49 @@
package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.jpa.config.ElasticsearchWithPrefixConfig;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.testcontainers.elasticsearch.ElasticsearchContainer;
import java.io.IOException;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
@RequiresDocker
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {ElasticsearchWithPrefixConfig.class})
public class ElasticsearchPrefixTest {
@Autowired
ElasticsearchContainer elasticsearchContainer;
public static String ELASTIC_PREFIX = "hapi-fhir";
@Test
public void test() throws IOException {
//Given
RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(
"http://" + elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", "");
//When
RestClient lowLevelClient = elasticsearchHighLevelRestClient.getLowLevelClient();
Response get = lowLevelClient.performRequest(new Request("GET", "/_cat/indices"));
String catIndexes = EntityUtils.toString(get.getEntity());
//Then
assertThat(catIndexes, containsString(ELASTIC_PREFIX + "-resourcetable-000001"));
assertThat(catIndexes, containsString(ELASTIC_PREFIX + "-termconcept-000001"));
}
}

View File

@ -9,11 +9,15 @@ import static org.junit.jupiter.api.Assertions.assertEquals;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import java.util.stream.Collectors;
import ca.uhn.fhir.context.support.ValueSetExpansionOptions;
import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
import org.hamcrest.Matchers;
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CodeSystem;
@ -28,6 +32,8 @@ import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
@ -341,6 +347,4 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
}
}

View File

@ -10,6 +10,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.TagTypeEnum;
import ca.uhn.fhir.jpa.partition.SystemRequestDetails;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;

View File

@ -249,6 +249,45 @@ public class ResourceProviderR4BundleTest extends BaseResourceProviderR4Test {
assertEquals(ids.get(4), bundleEntries.get(6).getResource().getIdElement().toUnqualifiedVersionless().getValueAsString());
}
@Test
public void testTagCacheWorksWithBatchMode() {
Bundle input = new Bundle();
input.setType(BundleType.BATCH);
Patient p = new Patient();
p.setId("100");
p.setGender(AdministrativeGender.MALE);
p.addIdentifier().setSystem("urn:foo").setValue("A");
p.addName().setFamily("Smith");
p.getMeta().addTag().setSystem("mysystem").setCode("mycode");
input.addEntry().setResource(p).getRequest().setMethod(HTTPVerb.POST);
Patient p2 = new Patient();
p2.setId("200");
p2.setGender(AdministrativeGender.MALE);
p2.addIdentifier().setSystem("urn:foo").setValue("A");
p2.addName().setFamily("Smith");
p2.getMeta().addTag().setSystem("mysystem").setCode("mycode");
input.addEntry().setResource(p2).getRequest().setMethod(HTTPVerb.POST);
Patient p3 = new Patient();
p3.setId("pat-300");
p3.setGender(AdministrativeGender.MALE);
p3.addIdentifier().setSystem("urn:foo").setValue("A");
p3.addName().setFamily("Smith");
p3.getMeta().addTag().setSystem("mysystem").setCode("mycode");
input.addEntry().setResource(p3).getRequest().setMethod(HTTPVerb.PUT).setUrl("Patient/pat-300");
Bundle output = myClient.transaction().withBundle(input).execute();
output.getEntry().stream()
.map(BundleEntryComponent::getResponse)
.map(Bundle.BundleEntryResponseComponent::getStatus)
.forEach(statusCode -> {
assertEquals("201 Created", statusCode);
});
}
private List<String> createPatients(int count) {
List<String> ids = new ArrayList<String>();

View File

@ -14,6 +14,7 @@ import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCache;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheFactory;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerCacheRefresherImpl;
import ca.uhn.fhir.jpa.cache.ResourceChangeListenerRegistryImpl;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.dao.TransactionProcessor;
@ -45,6 +46,7 @@ import com.google.common.collect.Lists;
import org.hamcrest.Matchers;
import org.hibernate.internal.SessionImpl;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.ExplanationOfBenefit;
import org.junit.jupiter.api.AfterEach;
@ -323,10 +325,15 @@ public class GiantTransactionPerfTest {
@Nonnull
@Override
public ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap) {
public ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap) {
myGetVersionMap++;
return ResourceVersionMap.fromResources(Lists.newArrayList());
}
@Override
public ResourcePersistentIdMap getLatestVersionIdsForResourceIds(RequestPartitionId thePartition, List<IIdType> theIds) {
return null;
}
}
private class MockResourceHistoryTableDao implements IResourceHistoryTableDao {

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -20,9 +20,12 @@ package ca.uhn.fhir.jpa.cache;
* #L%
*/
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import org.hl7.fhir.instance.model.api.IIdType;
import javax.annotation.Nonnull;
import java.util.List;
/**
* This interface is used by the {@link IResourceChangeListenerCacheRefresher} to read resources matching the provided
@ -30,5 +33,12 @@ import javax.annotation.Nonnull;
*/
public interface IResourceVersionSvc {
@Nonnull
ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap);
ResourceVersionMap getVersionMap(RequestPartitionId theRequestPartitionId, String theResourceName, SearchParameterMap theSearchParamMap);
@Nonnull
default ResourceVersionMap getVersionMap(String theResourceName, SearchParameterMap theSearchParamMap) {
return getVersionMap(RequestPartitionId.allPartitions(), theResourceName, theSearchParamMap);
}
ResourcePersistentIdMap getLatestVersionIdsForResourceIds(RequestPartitionId thePartition, List<IIdType> theIds);
}
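
Callers of the widened interface now pass the partition explicitly; the old two-argument form delegates to RequestPartitionId.allPartitions() as shown above. A rough usage sketch with an injected service and hypothetical resource ids:

import java.util.Arrays;
import java.util.List;

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.primitive.IdDt;
import org.hl7.fhir.instance.model.api.IIdType;

public class ResourceVersionLookupExample {
	// theResourceVersionSvc would normally be injected by Spring
	void lookup(IResourceVersionSvc theResourceVersionSvc) {
		// Partition-aware read of all current Patient versions
		ResourceVersionMap versions = theResourceVersionSvc.getVersionMap(
			RequestPartitionId.allPartitions(), "Patient", new SearchParameterMap());

		// Batch lookup of the latest version-qualified pids for known ids
		List<IIdType> ids = Arrays.asList(new IdDt("Patient/123"), new IdDt("Observation/456"));
		ResourcePersistentIdMap latest = theResourceVersionSvc.getLatestVersionIdsForResourceIds(
			RequestPartitionId.allPartitions(), ids);
	}
}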

View File

@ -173,8 +173,8 @@ public class ResourceChangeListenerCacheRefresherImpl implements IResourceChange
List<IIdType> updatedIds = new ArrayList<>();
for (IIdType id : theNewResourceVersionMap.keySet()) {
String previousValue = theOldResourceVersionCache.put(id, theNewResourceVersionMap.get(id));
IIdType newId = id.withVersion(theNewResourceVersionMap.get(id));
Long previousValue = theOldResourceVersionCache.put(id, theNewResourceVersionMap.get(id));
IIdType newId = id.withVersion(theNewResourceVersionMap.get(id).toString());
if (previousValue == null) {
createdIds.add(newId);
} else if (!theNewResourceVersionMap.get(id).equals(previousValue)) {

View File

@ -0,0 +1,67 @@
package ca.uhn.fhir.jpa.cache;
/*-
* #%L
* HAPI FHIR Search Parameters
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class ResourcePersistentIdMap {
private final Map<IIdType, ResourcePersistentId> myMap = new HashMap<>();
public static ResourcePersistentIdMap fromResourcePersistentIds(List<ResourcePersistentId> theResourcePersistentIds) {
ResourcePersistentIdMap retval = new ResourcePersistentIdMap();
theResourcePersistentIds.forEach(retval::add);
return retval;
}
private void add(ResourcePersistentId theResourcePersistentId) {
IIdType id = theResourcePersistentId.getAssociatedResourceId();
myMap.put(id.toUnqualifiedVersionless(), theResourcePersistentId);
}
public boolean containsKey(IIdType theId) {
return myMap.containsKey(theId.toUnqualifiedVersionless());
}
public ResourcePersistentId getResourcePersistentId(IIdType theId) {
return myMap.get(theId.toUnqualifiedVersionless());
}
public boolean isEmpty() {
return myMap.isEmpty();
}
public int size() {
return myMap.size();
}
public void put(IIdType theId, ResourcePersistentId thePid) {
// Normalize to a versionless key so containsKey()/getResourcePersistentId() lookups match
myMap.put(theId.toUnqualifiedVersionless(), thePid);
}
public void putAll(ResourcePersistentIdMap theIdAndPID) {
myMap.putAll(theIdAndPID.myMap);
}
}
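
A small sketch of how the new map is meant to be built and queried, assuming ResourcePersistentId exposes a constructor taking the pid and a setter for the associated resource id (the id and pid values below are made up):

import java.util.Collections;

import ca.uhn.fhir.jpa.cache.ResourcePersistentIdMap;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;

public class ResourcePersistentIdMapExample {
	ResourcePersistentId exampleEntry() {
		ResourcePersistentId pid = new ResourcePersistentId(42L);
		pid.setAssociatedResourceId(new IdDt("Patient/123/_history/3"));

		ResourcePersistentIdMap map =
			ResourcePersistentIdMap.fromResourcePersistentIds(Collections.singletonList(pid));

		// Lookups normalize to the unqualified, versionless id, so a versioned or
		// qualified id resolves to the same entry
		boolean present = map.containsKey(new IdDt("Patient/123"));
		return map.getResourcePersistentId(new IdDt("Patient/123"));
	}
}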

View File

@ -32,7 +32,7 @@ import java.util.Set;
* detect resources that were modified on remote servers in our cluster.
*/
public class ResourceVersionCache {
private final Map<IIdType, String> myVersionMap = new HashMap<>();
private final Map<IIdType, Long> myVersionMap = new HashMap<>();
public void clear() {
myVersionMap.clear();
@ -43,15 +43,15 @@ public class ResourceVersionCache {
* @param theVersion
* @return previous value
*/
public String put(IIdType theResourceId, String theVersion) {
public Long put(IIdType theResourceId, Long theVersion) {
return myVersionMap.put(new IdDt(theResourceId).toVersionless(), theVersion);
}
public String getVersionForResourceId(IIdType theResourceId) {
public Long getVersionForResourceId(IIdType theResourceId) {
return myVersionMap.get(new IdDt(theResourceId));
}
public String removeResourceId(IIdType theResourceId) {
public Long removeResourceId(IIdType theResourceId) {
return myVersionMap.remove(new IdDt(theResourceId));
}

View File

@ -24,6 +24,8 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.model.primitive.IdDt;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Collections;
import java.util.HashMap;
@ -36,8 +38,10 @@ import java.util.Set;
* This immutable map holds a copy of current resource versions read from the repository.
*/
public class ResourceVersionMap {
private static final Logger ourLog = LoggerFactory.getLogger(ResourceVersionMap.class);
private final Set<IIdType> mySourceIds = new HashSet<>();
private final Map<IIdType, String> myMap = new HashMap<>();
// Key versionless id, value version
private final Map<IIdType, Long> myMap = new HashMap<>();
private ResourceVersionMap() {}
public static ResourceVersionMap fromResourceTableEntities(List<ResourceTable> theEntities) {
@ -57,13 +61,17 @@ public class ResourceVersionMap {
}
private void add(IIdType theId) {
if (theId.getVersionIdPart() == null) {
ourLog.warn("Not storing {} in ResourceVersionMap because it does not have a version.", theId);
return;
}
IdDt id = new IdDt(theId);
mySourceIds.add(id);
myMap.put(id.toUnqualifiedVersionless(), id.getVersionIdPart());
myMap.put(id.toUnqualifiedVersionless(), id.getVersionIdPartAsLong());
}
public String getVersion(IIdType theResourceId) {
return myMap.get(new IdDt(theResourceId.toUnqualifiedVersionless()));
public Long getVersion(IIdType theResourceId) {
return get(theResourceId);
}
public int size() {
@ -78,11 +86,15 @@ public class ResourceVersionMap {
return Collections.unmodifiableSet(mySourceIds);
}
public String get(IIdType theId) {
public Long get(IIdType theId) {
return myMap.get(new IdDt(theId.toUnqualifiedVersionless()));
}
public boolean containsKey(IIdType theId) {
return myMap.containsKey(new IdDt(theId.toUnqualifiedVersionless()));
}
public boolean isEmpty() {
return myMap.isEmpty();
}
}
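
With versions now carried as Long end to end, comparing a freshly read ResourceVersionMap against a ResourceVersionCache needs no string parsing. A hedged sketch, using a hypothetical resource list and id:

import java.util.List;

import ca.uhn.fhir.jpa.cache.ResourceVersionCache;
import ca.uhn.fhir.jpa.cache.ResourceVersionMap;
import ca.uhn.fhir.model.primitive.IdDt;
import org.hl7.fhir.instance.model.api.IBaseResource;

public class ResourceVersionTrackingExample {
	boolean hasChanged(List<IBaseResource> theResources, ResourceVersionCache theCache) {
		// Versions are Long on both sides, so the comparison is a plain equals()
		ResourceVersionMap versionMap = ResourceVersionMap.fromResources(theResources);
		Long newVersion = versionMap.get(new IdDt("Patient/123"));
		Long previous = theCache.put(new IdDt("Patient/123"), newVersion);
		return previous != null && !previous.equals(newVersion);
	}
}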

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -41,6 +41,7 @@ import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Lazy;
@ -94,8 +95,8 @@ public class TestJpaDstu3Config extends BaseJavaConfigDstu3 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -41,6 +41,7 @@ import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Lazy;
@ -94,8 +95,8 @@ public class TestJpaR4Config extends BaseJavaConfigR4 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -23,6 +23,7 @@ import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.hl7.fhir.dstu2.model.Subscription;
import org.hl7.fhir.r5.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -134,8 +135,8 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu2");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -23,6 +23,7 @@ import org.hl7.fhir.dstu2.model.Subscription;
import org.hl7.fhir.r5.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -140,8 +141,8 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaDstu3");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -24,6 +24,7 @@ import org.hl7.fhir.dstu2.model.Subscription;
import org.hl7.fhir.r5.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -133,8 +134,8 @@ public class TestR4Config extends BaseJavaConfigR4 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaR4");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -23,6 +23,7 @@ import org.hl7.fhir.dstu2.model.Subscription;
import org.hl7.fhir.r5.utils.IResourceValidator;
import org.springframework.beans.factory.annotation.Autowire;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.beans.factory.config.ConfigurableListableBeanFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@ -133,8 +134,8 @@ public class TestR5Config extends BaseJavaConfigR5 {
@Override
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactory() {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory();
public LocalContainerEntityManagerFactoryBean entityManagerFactory(ConfigurableListableBeanFactory theConfigurableListableBeanFactory) {
LocalContainerEntityManagerFactoryBean retVal = super.entityManagerFactory(theConfigurableListableBeanFactory);
retVal.setPersistenceUnitName("PU_HapiFhirJpaR5");
retVal.setDataSource(dataSource());
retVal.setJpaProperties(jpaProperties());

View File

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -1,5 +1,25 @@
package ca.uhn.fhir.mdm.api;
/*-
* #%L
* HAPI FHIR - Master Data Management
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
public interface IMdmBatchJobSubmitterFactory {
IMdmClearJobSubmitter getClearJobSubmitter();
}

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -33,6 +33,7 @@ import java.util.Optional;
* a Long, a String, or something else.
*/
public class ResourcePersistentId {
private static final String RESOURCE_PID = "RESOURCE_PID";
private Object myId;
private Long myVersion;
private IIdType myAssociatedResourceId;

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</parent>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -18,6 +18,4 @@ public class FhirServerConfig {
DaoConfig retVal = new DaoConfig();
return retVal;
}
}

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

View File

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
@ -58,37 +58,37 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-dstu3</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-hl7org-dstu2</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-r4</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-structures-r5</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-validation-resources-dstu2</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-validation-resources-dstu3</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-validation-resources-r4</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
</dependency>
<dependency>
<groupId>org.apache.velocity</groupId>

View File

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

View File

@ -6,7 +6,7 @@
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<packaging>pom</packaging>
<version>5.6.0-PRE3-SNAPSHOT</version>
<version>5.6.0-PRE4-SNAPSHOT</version>
<name>HAPI-FHIR</name>
<description>An open-source implementation of the FHIR specification in Java.</description>
<url>https://hapifhir.io</url>

Some files were not shown because too many files have changed in this diff.