Streamline Search SQLs (#2576)

* Streamline SQL queries
* SQL query tidying
* Add changelog
* Test fixes
* Tests passing
* Ongoing work
* Test fixes
* Test fixes
* Build fix
* Cleanup
* Fixes
parent dd61a8aa2c
commit 95d7ba6fa8
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -233,4 +233,9 @@ public class MethodOutcome {
 			setCreated(true);
 		}
 	}
+
+	protected boolean hasResource() {
+		return myResource != null;
+	}
+
 }
@@ -72,7 +72,7 @@ ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.onlyBinarySelected=Binary
 ca.uhn.fhir.jpa.bulk.export.svc.BulkDataExportSvcImpl.unknownResourceType=Unknown or unsupported resource type: {0}
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceVersionConstraintFailure=The operation has failed with a version constraint failure. This generally means that two clients/threads were trying to update the same resource at the same time, and this request was chosen as the failing request.
 ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.resourceIndexedCompositeStringUniqueConstraintFailure=The operation has failed with a unique index constraint failure. This probably means that the operation was trying to create/update a resource that would have resulted in a duplicate value for a unique index.
-ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected.
+ca.uhn.fhir.jpa.config.HapiFhirHibernateJpaDialect.forcedIdConstraintFailure=The operation has failed with a client-assigned ID constraint failure. This typically means that multiple client threads are trying to create a new resource with the same client-assigned ID at the same time, and this thread was chosen to be rejected. It can also happen when a request disables the Upsert Existence Check.
 
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.externalizedBinaryStorageExtensionFoundInRequestBody=Illegal extension found in request payload - URL "{0}" and value "{1}"
 ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.incomingNoopInTransaction=Transaction contains resource with operation NOOP. This is only valid as a response operation, not in a request
@@ -3,14 +3,14 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir-bom</artifactId>
-	<version>5.4.0-PRE8-SNAPSHOT</version>
+	<version>5.4.0-PRE9-SNAPSHOT</version>
 	<packaging>pom</packaging>
 	<name>HAPI FHIR BOM</name>
 
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -83,13 +83,13 @@
 		<dependency>
 			<groupId>ca.uhn.hapi.fhir</groupId>
 			<artifactId>hapi-fhir-structures-dstu2</artifactId>
-			<version>5.4.0-PRE8-SNAPSHOT</version>
+			<version>5.4.0-PRE9-SNAPSHOT</version>
 			<scope>compile</scope>
 		</dependency>
 		<dependency>
 			<groupId>ca.uhn.hapi.fhir</groupId>
 			<artifactId>hapi-fhir-jpaserver-subscription</artifactId>
-			<version>5.4.0-PRE8-SNAPSHOT</version>
+			<version>5.4.0-PRE9-SNAPSHOT</version>
 			<scope>compile</scope>
 		</dependency>
 		<dependency>
@@ -106,7 +106,7 @@
 		<dependency>
 			<groupId>ca.uhn.hapi.fhir</groupId>
 			<artifactId>hapi-fhir-testpage-overlay</artifactId>
-			<version>5.4.0-PRE8-SNAPSHOT</version>
+			<version>5.4.0-PRE9-SNAPSHOT</version>
 			<classifier>classes</classifier>
 		</dependency>
 		<dependency>
@@ -0,0 +1,5 @@
---
type: add
issue: 2576
title: "A new header called `X-Upsert-Extistence-Check` can now be added to JPA server *Create with Client-Assigned ID* operations
  (aka Upsert) in order to improve performance when loading data that is known to not exist by skipping the resource existence check."

@@ -0,0 +1,5 @@
---
type: perf
issue: 2576
title: "A new PID-to-forced-ID cache, and a new optional Match-URL-to-PID cache have been added. These
  can improve write performance when doing large loads."

@@ -0,0 +1,5 @@
---
type: perf
issue: 2576
title: "The generated search SQL statements have been optimized for simple JPA server searches containing
  only one parameter. In this case, an unnecessary JOIN has been removed."
@@ -33,3 +33,17 @@ It can also be disabled at a more granular level (or selectively re-enabled if i
     "expression": "Observation.code"
 }
 ```
+
+# Disable Upsert Existence Check
+
+If you are using an *Update with Client Assigned ID* (aka an Upsert), the server will perform a SQL Select in order to determine whether the ID already exists, and then proceed to create a new record if no data matches the existing row.
+
+If you are sure that the row does not already exist, you can add the following header to your request in order to avoid this check.
+
+```http
+X-Upsert-Extistence-Check: disabled
+```
+
+This should improve write performance, so this header can be useful when large amounts of data will be created using client assigned IDs in a controlled fashion.
+
+If this setting is used and a resource already exists with a given client-assigned ID, a database constraint error will prevent any duplicate records from being created, and the operation will fail.
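For context, a client could opt into this behaviour using HAPI FHIR's generic client. The following is a minimal sketch, not code from this commit: the server URL and Patient ID are made up, and it assumes the fluent client's `withAdditionalHeader` method is available for adding the header to the update request.

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Patient;

public class UpsertWithoutExistenceCheck {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir");

		// An "Update with Client Assigned ID" (upsert); the ID is an illustrative example.
		Patient patient = new Patient();
		patient.setId("Patient/A123");
		patient.addName().setFamily("Simpson").addGiven("Homer");

		MethodOutcome outcome = client
			.update()
			.resource(patient)
			// Skip the server-side SQL Select that checks whether the ID already exists.
			.withAdditionalHeader("X-Upsert-Extistence-Check", "disabled")
			.execute();
	}
}
```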
@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>

@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>

@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -48,16 +48,16 @@ public class DaoConfig {
	 * Default value for {@link #setReuseCachedSearchResultsForMillis(Long)}: 60000ms (one minute)
	 */
	public static final Long DEFAULT_REUSE_CACHED_SEARCH_RESULTS_FOR_MILLIS = DateUtils.MILLIS_PER_MINUTE;
	/**
	 * See {@link #setStatusBasedReindexingDisabled(boolean)}
	 */
	public static final String DISABLE_STATUS_BASED_REINDEX = "disable_status_based_reindex";
	/**
	 * Default value for {@link #setTranslationCachesExpireAfterWriteInMinutes(Long)}: 60 minutes
	 *
	 * @see #setTranslationCachesExpireAfterWriteInMinutes(Long)
	 */
	public static final Long DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES = 60L;
	/**
	 * See {@link #setStatusBasedReindexingDisabled(boolean)}
	 */
	public static final String DISABLE_STATUS_BASED_REINDEX = "disable_status_based_reindex";
	/**
	 * Default {@link #setBundleTypesAllowedForStorage(Set)} value:
	 * <ul>
@@ -91,11 +91,6 @@ public class DaoConfig {
 
 	private final ModelConfig myModelConfig = new ModelConfig();
 
-	/**
-	 * update setter javadoc if default changes
-	 */
-	@Nonnull
-	private Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES;
 	/**
 	 * update setter javadoc if default changes
 	 */
@@ -201,6 +196,15 @@ public class DaoConfig {
 	 * @since 5.2.0
 	 */
 	private boolean myUseLegacySearchBuilder = false;
+	/**
+	 * update setter javadoc if default changes
+	 */
+	@Nonnull
+	private Long myTranslationCachesExpireAfterWriteInMinutes = DEFAULT_TRANSLATION_CACHES_EXPIRE_AFTER_WRITE_IN_MINUTES;
+	/**
+	 * @since 5.4.0
+	 */
+	private boolean myMatchUrlCache;
 
 	/**
 	 * Constructor
@@ -253,6 +257,43 @@ public class DaoConfig {
 		return myUseLegacySearchBuilder;
 	}
 
+	/**
+	 * Specifies the duration in minutes for which values will be retained after being
+	 * written to the terminology translation cache. Defaults to 60.
+	 */
+	@Nonnull
+	public Long getTranslationCachesExpireAfterWriteInMinutes() {
+		return myTranslationCachesExpireAfterWriteInMinutes;
+	}
+
+	/**
+	 * If enabled, resolutions for match URLs (e.g. conditional create URLs, conditional update URLs, etc) will be
+	 * cached in an in-memory cache. This cache can have a noticeable improvement on write performance on servers
+	 * where conditional operations are frequently performed, but note that this cache will not be
+	 * invalidated based on updates to resources so this may have detrimental effects.
+	 *
+	 * Default is <code>false</code>
+	 *
+	 * @since 5.4.0
+	 */
+	public void setMatchUrlCache(boolean theMatchUrlCache) {
+		myMatchUrlCache = theMatchUrlCache;
+	}
+
+	/**
+	 * If enabled, resolutions for match URLs (e.g. conditional create URLs, conditional update URLs, etc) will be
+	 * cached in an in-memory cache. This cache can have a noticeable improvement on write performance on servers
+	 * where conditional operations are frequently performed, but note that this cache will not be
+	 * invalidated based on updates to resources so this may have detrimental effects.
+	 *
+	 * Default is <code>false</code>
+	 *
+	 * @since 5.4.0
+	 */
+	public boolean getMatchUrlCache() {
+		return myMatchUrlCache;
+	}
+
 	/**
 	 * This method controls whether to use the new non-hibernate search SQL builder that was introduced in HAPI FHIR 5.2.0.
 	 * By default this will be <code>false</code> meaning that the new SQL builder is used. Set to <code>true</code> to use the
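The new `setMatchUrlCache(boolean)` setting shown above is opt-in. The sketch below shows one way it could be enabled on a JPA server's `DaoConfig` bean; the Spring configuration class and bean wiring are illustrative assumptions rather than part of this commit.

```java
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class FhirServerConfig {

	@Bean
	public DaoConfig daoConfig() {
		DaoConfig daoConfig = new DaoConfig();
		// Opt in to the Match-URL-to-PID cache added in this change. Conditional
		// create/update URLs will then be resolved from memory when possible, which
		// helps bulk loads but is not invalidated when matching resources change.
		daoConfig.setMatchUrlCache(true);
		return daoConfig;
	}
}
```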
@@ -882,23 +923,6 @@ public class DaoConfig {
 		myReuseCachedSearchResultsForMillis = theReuseCachedSearchResultsForMillis;
 	}
 
-	/**
-	 * Specifies the duration in minutes for which values will be retained after being
-	 * written to the terminology translation cache. Defaults to 60.
-	 */
-	@Nonnull
-	public Long getTranslationCachesExpireAfterWriteInMinutes() {
-		return myTranslationCachesExpireAfterWriteInMinutes;
-	}
-
-	/**
-	 * Specifies the duration in minutes for which values will be retained after being
-	 * written to the terminology translation cache. Defaults to 60.
-	 */
-	public void setTranslationCachesExpireAfterWriteInMinutes(Long translationCachesExpireAfterWriteInMinutes) {
-		myTranslationCachesExpireAfterWriteInMinutes = translationCachesExpireAfterWriteInMinutes;
-	}
-
 	/**
 	 * This setting may be used to advise the server that any references found in
 	 * resources that have any of the base URLs given here will be treated as logical
@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.api.model;
 
 import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
 import ca.uhn.fhir.rest.api.MethodOutcome;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
 import org.hl7.fhir.instance.model.api.IBaseResource;
 
 public class DaoMethodOutcome extends MethodOutcome {
@@ -29,6 +30,7 @@ public class DaoMethodOutcome extends MethodOutcome {
 	private IBasePersistedResource myEntity;
 	private IBaseResource myPreviousResource;
 	private boolean myNop;
+	private ResourcePersistentId myResourcePersistentId;
 
 	/**
 	 * Constructor
@@ -82,4 +84,14 @@ public class DaoMethodOutcome extends MethodOutcome {
 		super.setCreated(theCreated);
 		return this;
 	}
+
+	public DaoMethodOutcome setPersistentId(ResourcePersistentId theResourcePersistentId) {
+		myResourcePersistentId = theResourcePersistentId;
+		return this;
+	}
+
+	public ResourcePersistentId getPersistentId() {
+		return myResourcePersistentId;
+	}
+
 }
@@ -0,0 +1,123 @@
package ca.uhn.fhir.jpa.api.model;

/*
 * #%L
 * HAPI FHIR JPA API
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

import java.util.function.Supplier;

public class LazyDaoMethodOutcome extends DaoMethodOutcome {

	private Supplier<EntityAndResource> myEntitySupplier;
	private Supplier<IIdType> myIdSupplier;
	private Runnable myEntitySupplierUseCallback;

	/**
	 * Constructor
	 */
	public LazyDaoMethodOutcome(ResourcePersistentId theResourcePersistentId) {
		setPersistentId(theResourcePersistentId);
	}

	@Override
	public IBasePersistedResource getEntity() {
		IBasePersistedResource retVal = super.getEntity();
		if (retVal == null) {
			tryToRunSupplier();
			retVal = super.getEntity();
		}
		return retVal;
	}

	private void tryToRunSupplier() {
		if (myEntitySupplier != null) {

			EntityAndResource entityAndResource = myEntitySupplier.get();
			setEntity(entityAndResource.getEntity());
			setResource(entityAndResource.getResource());
			setId(entityAndResource.getResource().getIdElement());
			myEntitySupplierUseCallback.run();

		}
	}

	@Override
	public IIdType getId() {
		IIdType retVal = super.getId();
		if (retVal == null) {
			if (super.hasResource()) {
				retVal = getResource().getIdElement();
				setId(retVal);
			} else {
				if (myIdSupplier != null) {
					retVal = myIdSupplier.get();
					setId(retVal);
				}
			}
		}
		return retVal;
	}

	@Override
	public IBaseResource getResource() {
		IBaseResource retVal = super.getResource();
		if (retVal == null) {
			tryToRunSupplier();
			retVal = super.getResource();
		}
		return retVal;
	}

	public void setEntitySupplier(Supplier<EntityAndResource> theEntitySupplier) {
		myEntitySupplier = theEntitySupplier;
	}

	public void setEntitySupplierUseCallback(Runnable theEntitySupplierUseCallback) {
		myEntitySupplierUseCallback = theEntitySupplierUseCallback;
	}

	public void setIdSupplier(Supplier<IIdType> theIdSupplier) {
		myIdSupplier = theIdSupplier;
	}


	public static class EntityAndResource {
		private final IBasePersistedResource myEntity;
		private final IBaseResource myResource;

		public EntityAndResource(IBasePersistedResource theEntity, IBaseResource theResource) {
			myEntity = theEntity;
			myResource = theResource;
		}

		public IBasePersistedResource getEntity() {
			return myEntity;
		}

		public IBaseResource getResource() {
			return myResource;
		}
	}

}
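To make the intent of the new class concrete: the outcome is created with only the persistent ID, and the entity, resource, and ID are resolved lazily via the suppliers the first time a caller asks for them. The fragment below is a hedged sketch; `myEntity`, `myResource`, and `myFhirContext` are hypothetical placeholders, not code from this commit.

```java
// Illustrative only: the expensive entity/resource load runs only if needed.
LazyDaoMethodOutcome outcome = new LazyDaoMethodOutcome(new ResourcePersistentId(123L));

// Hypothetical loaders standing in for the real DAO lookups wired up elsewhere:
outcome.setEntitySupplier(() -> new LazyDaoMethodOutcome.EntityAndResource(myEntity, myResource));
outcome.setEntitySupplierUseCallback(() -> System.out.println("Entity supplier was used"));
outcome.setIdSupplier(() -> myFhirContext.getVersion().newIdType().setValue("Patient/A123"));

// A caller that only needs the logical ID never triggers the full entity load:
IIdType id = outcome.getId();
```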
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>5.4.0-PRE8-SNAPSHOT</version>
+		<version>5.4.0-PRE9-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -610,6 +610,12 @@
 			<version>2.5-20081211</version>
 			<scope>test</scope>
 		</dependency>
+		<dependency>
+			<groupId>org.apache.jena</groupId>
+			<artifactId>jena-arq</artifactId>
+			<version>3.17.0</version>
+			<scope>compile</scope>
+		</dependency>
 	</dependencies>
 
@@ -160,7 +160,7 @@ public class GroupBulkItemReader extends BaseBulkItemReader implements ItemReade
//		}
 		Map<String, String> sourceResourceIdToGoldenResourceIdMap = new HashMap<>();
 		goldenResourceToSourcePidMap.forEach((key, value) -> {
-			String goldenResourceId = myIdHelperService.translatePidIdToForcedId(new ResourcePersistentId(key)).orElse(key.toString());
+			String goldenResourceId = myIdHelperService.translatePidIdToForcedIdWithCache(new ResourcePersistentId(key)).orElse(key.toString());
 			Map<Long, Optional<String>> pidsToForcedIds = myIdHelperService.translatePidsToForcedIds(value);
 
 			Set<String> sourceResourceIds = pidsToForcedIds.entrySet().stream()
@ -164,7 +164,6 @@ import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
|
|||
import javax.annotation.Nullable;
|
||||
import javax.annotation.PostConstruct;
|
||||
import java.util.Date;
|
||||
import java.util.concurrent.RejectedExecutionHandler;
|
||||
|
||||
/*
|
||||
* #%L
|
||||
|
|
|
@@ -280,6 +280,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
				retVal.setPartitionId(theEntity.getPartitionId());
				theEntity.setForcedId(retVal);
			}
		} else if (theEntity.getForcedId() != null) {
			retVal = theEntity.getForcedId();
		}

		return retVal;
@ -33,6 +33,7 @@ import ca.uhn.fhir.jpa.api.model.DeleteConflictList;
|
|||
import ca.uhn.fhir.jpa.api.model.DeleteMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
|
||||
import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
|
||||
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.dao.expunge.DeleteExpungeService;
|
||||
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
|
||||
import ca.uhn.fhir.jpa.delete.DeleteConflictService;
|
||||
|
@ -54,19 +55,20 @@ import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
|
|||
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
|
||||
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.model.dstu2.resource.ListResource;
|
||||
import ca.uhn.fhir.model.primitive.IdDt;
|
||||
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.EncodingEnum;
|
||||
import ca.uhn.fhir.rest.api.InterceptorInvocationTimingEnum;
|
||||
import ca.uhn.fhir.rest.api.MethodOutcome;
|
||||
import ca.uhn.fhir.rest.api.PatchTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
|
||||
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
|
||||
import ca.uhn.fhir.rest.api.ValidationModeEnum;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;
|
||||
|
@ -135,6 +137,7 @@ import java.util.List;
|
|||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.function.Supplier;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.defaultString;
|
||||
|
@ -173,6 +176,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
private Class<T> myResourceType;
|
||||
@Autowired
|
||||
private IRequestPartitionHelperSvc myPartitionHelperSvc;
|
||||
@Autowired
|
||||
private MemoryCacheService myMemoryCacheService;
|
||||
|
||||
@Override
|
||||
@Transactional
|
||||
|
@@ -289,10 +294,26 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 			throw new PreconditionFailedException(msg);
 		} else if (match.size() == 1) {
 			ResourcePersistentId pid = match.iterator().next();
-			entity = myEntityManager.find(ResourceTable.class, pid.getId());
-			IBaseResource resource = toResource(entity, false);
+
+			Supplier<LazyDaoMethodOutcome.EntityAndResource> entitySupplier = () -> {
+				ResourceTable foundEntity = myEntityManager.find(ResourceTable.class, pid.getId());
+				IBaseResource resource = toResource(foundEntity, false);
 				theResource.setId(resource.getIdElement().getValue());
-			return toMethodOutcome(theRequest, entity, resource).setCreated(false).setNop(true);
+				return new LazyDaoMethodOutcome.EntityAndResource(foundEntity, resource);
+			};
+
+			Supplier<IIdType> idSupplier = () -> {
+				IIdType retVal = myIdHelperService.translatePidIdToForcedId(myFhirContext, myResourceName, pid);
+				if (!retVal.hasVersionIdPart()) {
+					return myMemoryCacheService.get(MemoryCacheService.CacheEnum.RESOURCE_CONDITIONAL_CREATE_VERSION, retVal, t -> {
+						long version = myResourceTableDao.findCurrentVersionByPid(pid.getIdAsLong());
+						return myFhirContext.getVersion().newIdType().setParts(retVal.getBaseUrl(), retVal.getResourceType(), retVal.getIdPart(), Long.toString(version));
+					});
+				}
+				return retVal;
+			};
+
+			return toMethodOutcomeLazy(theRequest, pid, entitySupplier, idSupplier).setCreated(false).setNop(true);
 		}
 	}
 
@ -355,6 +376,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
}
|
||||
}
|
||||
|
||||
if (theIfNoneExist != null) {
|
||||
myMatchResourceUrlService.matchUrlResolved(theIfNoneExist, new ResourcePersistentId(entity.getResourceId()));
|
||||
}
|
||||
|
||||
/*
|
||||
* If we aren't indexing (meaning we're probably executing a sub-operation within a transaction),
|
||||
* we'll manually increase the version. This is important because we want the updated version number
|
||||
|
@ -390,6 +415,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
String msg = getContext().getLocalizer().getMessageSanitized(BaseHapiFhirResourceDao.class, "successfulCreate", outcome.getId(), w.getMillisAndRestart());
|
||||
outcome.setOperationOutcome(createInfoOperationOutcome(msg));
|
||||
|
||||
String forcedId = null;
|
||||
if (updatedEntity.getForcedId() != null) {
|
||||
forcedId = updatedEntity.getForcedId().getForcedId();
|
||||
}
|
||||
if (myIdHelperService != null) {
|
||||
myIdHelperService.addResolvedPidToForcedId(new ResourcePersistentId(updatedEntity.getResourceId()), theRequestPartitionId, getResourceName(), forcedId);
|
||||
}
|
||||
|
||||
ourLog.debug(msg);
|
||||
return outcome;
|
||||
}
|
||||
|
@ -437,7 +470,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
|
||||
// Don't delete again if it's already deleted
|
||||
if (entity.getDeleted() != null) {
|
||||
DaoMethodOutcome outcome = new DaoMethodOutcome();
|
||||
DaoMethodOutcome outcome = new DaoMethodOutcome().setPersistentId(new ResourcePersistentId(entity.getResourceId()));
|
||||
outcome.setEntity(entity);
|
||||
|
||||
IIdType id = getContext().getVersion().newIdType();
|
||||
|
@ -1546,7 +1579,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
preProcessResourceForStorage(resource);
|
||||
preProcessResourceForStorage(theResource, theRequest, theTransactionDetails, thePerformIndexing);
|
||||
|
||||
final ResourceTable entity;
|
||||
ResourceTable entity = null;
|
||||
|
||||
IIdType resourceId;
|
||||
if (isNotBlank(theMatchUrl)) {
|
||||
|
@@ -1572,9 +1605,25 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
		assert resourceId.hasIdPart();

		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, getResourceName());

		boolean create = false;

		if (theRequest != null) {
			String existenceCheck = theRequest.getHeader(JpaConstants.HEADER_UPSERT_EXISTENCE_CHECK);
			if (JpaConstants.HEADER_UPSERT_EXISTENCE_CHECK_DISABLED.equals(existenceCheck)) {
				create = true;
			}
		}

		if (!create) {
			try {
				entity = readEntityLatestVersion(resourceId, requestPartitionId);
			} catch (ResourceNotFoundException e) {
				create = true;
			}
		}

		if (create) {
			requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName());
			return doCreateForPostOrPut(resource, null, thePerformIndexing, theTransactionDetails, theRequest, requestPartitionId);
		}
@ -1616,6 +1665,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
|
|||
DaoMethodOutcome outcome = toMethodOutcome(theRequest, entity, resource).setCreated(wasDeleted);
|
||||
outcome.setPreviousResource(oldResource);
|
||||
if (!outcome.isNop()) {
|
||||
// Technically this may not end up being right since we might not increment if the
|
||||
// contents turn out to be the same
|
||||
outcome.setId(outcome.getId().withVersion(Long.toString(outcome.getId().getVersionIdPartAsLong() + 1)));
|
||||
}
|
||||
return outcome;
|
||||
|
|
|
@ -29,11 +29,13 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.api.model.LazyDaoMethodOutcome;
|
||||
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterAnd;
|
||||
|
@ -73,6 +75,7 @@ import java.util.IdentityHashMap;
|
|||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.function.Supplier;
|
||||
|
||||
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.OO_SEVERITY_ERROR;
|
||||
import static ca.uhn.fhir.jpa.dao.BaseHapiFhirDao.OO_SEVERITY_INFO;
|
||||
|
@ -212,7 +215,7 @@ public abstract class BaseStorageDao {
|
|||
}
|
||||
|
||||
protected DaoMethodOutcome toMethodOutcome(RequestDetails theRequest, @Nonnull final IBasePersistedResource theEntity, @Nonnull IBaseResource theResource) {
|
||||
DaoMethodOutcome outcome = new DaoMethodOutcome();
|
||||
DaoMethodOutcome outcome = new DaoMethodOutcome().setPersistentId(theEntity.getPersistentId());
|
||||
|
||||
if (theEntity instanceof ResourceTable) {
|
||||
if (((ResourceTable) theEntity).isUnchangedInCurrentOperation()) {
|
||||
|
@ -269,6 +272,46 @@ public abstract class BaseStorageDao {
|
|||
return outcome;
|
||||
}
|
||||
|
||||
protected DaoMethodOutcome toMethodOutcomeLazy(RequestDetails theRequest, ResourcePersistentId theResourcePersistentId, @Nonnull final Supplier<LazyDaoMethodOutcome.EntityAndResource> theEntity, Supplier<IIdType> theIdSupplier) {
|
||||
LazyDaoMethodOutcome outcome = new LazyDaoMethodOutcome(theResourcePersistentId);
|
||||
|
||||
outcome.setEntitySupplier(theEntity);
|
||||
outcome.setIdSupplier(theIdSupplier);
|
||||
outcome.setEntitySupplierUseCallback(()->{
|
||||
// Interceptor broadcast: STORAGE_PREACCESS_RESOURCES
|
||||
if (outcome.getResource() != null) {
|
||||
SimplePreResourceAccessDetails accessDetails = new SimplePreResourceAccessDetails(outcome.getResource());
|
||||
HookParams params = new HookParams()
|
||||
.add(IPreResourceAccessDetails.class, accessDetails)
|
||||
.add(RequestDetails.class, theRequest)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequest);
|
||||
JpaInterceptorBroadcaster.doCallHooks(getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PREACCESS_RESOURCES, params);
|
||||
if (accessDetails.isDontReturnResourceAtIndex(0)) {
|
||||
outcome.setResource(null);
|
||||
}
|
||||
}
|
||||
|
||||
// Interceptor broadcast: STORAGE_PRESHOW_RESOURCES
|
||||
// Note that this will only fire if someone actually goes to use the
|
||||
// resource in a response (it's their responsibility to call
|
||||
// outcome.fireResourceViewCallback())
|
||||
outcome.registerResourceViewCallback(() -> {
|
||||
if (outcome.getResource() != null) {
|
||||
SimplePreResourceShowDetails showDetails = new SimplePreResourceShowDetails(outcome.getResource());
|
||||
HookParams params = new HookParams()
|
||||
.add(IPreResourceShowDetails.class, showDetails)
|
||||
.add(RequestDetails.class, theRequest)
|
||||
.addIfMatchesType(ServletRequestDetails.class, theRequest);
|
||||
JpaInterceptorBroadcaster.doCallHooks(getInterceptorBroadcaster(), theRequest, Pointcut.STORAGE_PRESHOW_RESOURCES, params);
|
||||
outcome.setResource(showDetails.getResource(0));
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return outcome;
|
||||
}
|
||||
|
||||
|
||||
protected void doCallHooks(TransactionDetails theTransactionDetails, RequestDetails theRequestDetails, Pointcut thePointcut, HookParams theParams) {
|
||||
if (theTransactionDetails.isAcceptingDeferredInterceptorBroadcasts(thePointcut)) {
|
||||
theTransactionDetails.addDeferredInterceptorBroadcast(thePointcut, theParams);
|
||||
|
|
|
@ -226,11 +226,11 @@ public abstract class BaseTransactionProcessor {
|
|||
myVersionAdapter.setResponseLastModified(newEntry, lastModifier);
|
||||
|
||||
if (theRequestDetails != null) {
|
||||
if (outcome.getResource() != null) {
|
||||
String prefer = theRequestDetails.getHeader(Constants.HEADER_PREFER);
|
||||
PreferReturnEnum preferReturn = RestfulServerUtils.parsePreferHeader(null, prefer).getReturn();
|
||||
if (preferReturn != null) {
|
||||
if (preferReturn == PreferReturnEnum.REPRESENTATION) {
|
||||
if (outcome.getResource() != null) {
|
||||
outcome.fireResourceViewCallbacks();
|
||||
myVersionAdapter.setResource(newEntry, outcome.getResource());
|
||||
}
|
||||
|
@ -440,21 +440,21 @@ public abstract class BaseTransactionProcessor {
|
|||
* heavy load with lots of concurrent transactions using all available
|
||||
* database connections.
|
||||
*/
|
||||
TransactionCallback<Map<IBase, IBasePersistedResource>> txCallback = status -> {
|
||||
TransactionCallback<Map<IBase, IIdType>> txCallback = status -> {
|
||||
final Set<IIdType> allIds = new LinkedHashSet<>();
|
||||
final Map<IIdType, IIdType> idSubstitutions = new HashMap<>();
|
||||
final Map<IIdType, DaoMethodOutcome> idToPersistedOutcome = new HashMap<>();
|
||||
Map<IBase, IBasePersistedResource> retVal = doTransactionWriteOperations(theRequestDetails, theActionName, transactionDetails, allIds, idSubstitutions, idToPersistedOutcome, response, originalRequestOrder, entries, transactionStopWatch);
|
||||
Map<IBase, IIdType> retVal = doTransactionWriteOperations(theRequestDetails, theActionName, transactionDetails, allIds, idSubstitutions, idToPersistedOutcome, response, originalRequestOrder, entries, transactionStopWatch);
|
||||
|
||||
transactionStopWatch.startTask("Commit writes to database");
|
||||
return retVal;
|
||||
};
|
||||
Map<IBase, IBasePersistedResource> entriesToProcess = myHapiTransactionService.execute(theRequestDetails, txCallback);
|
||||
Map<IBase, IIdType> entriesToProcess = myHapiTransactionService.execute(theRequestDetails, txCallback);
|
||||
transactionStopWatch.endCurrentTask();
|
||||
|
||||
for (Map.Entry<IBase, IBasePersistedResource> nextEntry : entriesToProcess.entrySet()) {
|
||||
String responseLocation = nextEntry.getValue().getIdDt().toUnqualified().getValue();
|
||||
String responseEtag = nextEntry.getValue().getIdDt().getVersionIdPart();
|
||||
for (Map.Entry<IBase, IIdType> nextEntry : entriesToProcess.entrySet()) {
|
||||
String responseLocation = nextEntry.getValue().toUnqualified().getValue();
|
||||
String responseEtag = nextEntry.getValue().getVersionIdPart();
|
||||
myVersionAdapter.setResponseLocation(nextEntry.getKey(), responseLocation);
|
||||
myVersionAdapter.setResponseETag(nextEntry.getKey(), responseEtag);
|
||||
}
|
||||
|
@ -576,7 +576,7 @@ public abstract class BaseTransactionProcessor {
|
|||
myModelConfig = theModelConfig;
|
||||
}
|
||||
|
||||
private Map<IBase, IBasePersistedResource> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
|
||||
private Map<IBase, IIdType> doTransactionWriteOperations(final RequestDetails theRequest, String theActionName, TransactionDetails theTransactionDetails, Set<IIdType> theAllIds,
|
||||
Map<IIdType, IIdType> theIdSubstitutions, Map<IIdType, DaoMethodOutcome> theIdToPersistedOutcome, IBaseBundle theResponse, IdentityHashMap<IBase, Integer> theOriginalRequestOrder, List<IBase> theEntries, StopWatch theTransactionStopWatch) {
|
||||
|
||||
theTransactionDetails.beginAcceptingDeferredInterceptorBroadcasts(
|
||||
|
@ -588,8 +588,8 @@ public abstract class BaseTransactionProcessor {
|
|||
|
||||
Set<String> deletedResources = new HashSet<>();
|
||||
DeleteConflictList deleteConflicts = new DeleteConflictList();
|
||||
Map<IBase, IBasePersistedResource> entriesToProcess = new IdentityHashMap<>();
|
||||
Set<IBasePersistedResource> nonUpdatedEntities = new HashSet<>();
|
||||
Map<IBase, IIdType> entriesToProcess = new IdentityHashMap<>();
|
||||
Set<IIdType> nonUpdatedEntities = new HashSet<>();
|
||||
Set<IBasePersistedResource> updatedEntities = new HashSet<>();
|
||||
List<IBaseResource> updatedResources = new ArrayList<>();
|
||||
Map<String, Class<? extends IBaseResource>> conditionalRequestUrls = new HashMap<>();
|
||||
|
@ -726,9 +726,9 @@ public abstract class BaseTransactionProcessor {
|
|||
if (nextResourceId != null) {
|
||||
handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res, theRequest);
|
||||
}
|
||||
entriesToProcess.put(nextRespEntry, outcome.getEntity());
|
||||
entriesToProcess.put(nextRespEntry, outcome.getId());
|
||||
if (outcome.getCreated() == false) {
|
||||
nonUpdatedEntities.add(outcome.getEntity());
|
||||
nonUpdatedEntities.add(outcome.getId());
|
||||
} else {
|
||||
if (isNotBlank(matchUrl)) {
|
||||
conditionalRequestUrls.put(matchUrl, res.getClass());
|
||||
|
@ -749,7 +749,7 @@ public abstract class BaseTransactionProcessor {
|
|||
DaoMethodOutcome outcome = dao.delete(deleteId, deleteConflicts, theRequest, theTransactionDetails);
|
||||
if (outcome.getEntity() != null) {
|
||||
deletedResources.add(deleteId.getValueAsString());
|
||||
entriesToProcess.put(nextRespEntry, outcome.getEntity());
|
||||
entriesToProcess.put(nextRespEntry, outcome.getId());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
|
@ -812,7 +812,7 @@ public abstract class BaseTransactionProcessor {
|
|||
}
|
||||
|
||||
handleTransactionCreateOrUpdateOutcome(theIdSubstitutions, theIdToPersistedOutcome, nextResourceId, outcome, nextRespEntry, resourceType, res, theRequest);
|
||||
entriesToProcess.put(nextRespEntry, outcome.getEntity());
|
||||
entriesToProcess.put(nextRespEntry, outcome.getId());
|
||||
break;
|
||||
}
|
||||
case "PATCH": {
|
||||
|
@ -916,7 +916,7 @@ public abstract class BaseTransactionProcessor {
|
|||
}
|
||||
DeleteConflictService.validateDeleteConflictsEmptyOrThrowException(myContext, deleteConflicts);
|
||||
|
||||
theIdToPersistedOutcome.entrySet().forEach(t -> theTransactionDetails.addResolvedResourceId(t.getKey(), t.getValue().getEntity().getPersistentId()));
|
||||
theIdToPersistedOutcome.entrySet().forEach(t -> theTransactionDetails.addResolvedResourceId(t.getKey(), t.getValue().getPersistentId()));
|
||||
|
||||
/*
|
||||
* Perform ID substitutions and then index each resource we have saved
|
||||
|
@ -931,6 +931,10 @@ public abstract class BaseTransactionProcessor {
|
|||
ourLog.debug("Have indexed {} entities out of {} in transaction", i, theIdToPersistedOutcome.values().size());
|
||||
}
|
||||
|
||||
if (nextOutcome.isNop()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
IBaseResource nextResource = nextOutcome.getResource();
|
||||
if (nextResource == null) {
|
||||
continue;
|
||||
|
@ -942,11 +946,23 @@ public abstract class BaseTransactionProcessor {
|
|||
for (ResourceReferenceInfo nextRef : allRefs) {
|
||||
IBaseReference resourceReference = nextRef.getResourceReference();
|
||||
IIdType nextId = resourceReference.getReferenceElement();
|
||||
IIdType newId = null;
|
||||
if (!nextId.hasIdPart()) {
|
||||
if (resourceReference.getResource() != null) {
|
||||
IIdType targetId = resourceReference.getResource().getIdElement();
|
||||
if (theIdSubstitutions.containsValue(targetId)) {
|
||||
newId = targetId;
|
||||
} else {
|
||||
throw new InternalErrorException("References by resource with no reference ID are not supported in DAO layer");
|
||||
}
|
||||
} else {
|
||||
continue;
|
||||
}
|
||||
if (theIdSubstitutions.containsKey(nextId)) {
|
||||
IIdType newId = theIdSubstitutions.get(nextId);
|
||||
}
|
||||
if (newId != null || theIdSubstitutions.containsKey(nextId)) {
|
||||
if (newId == null) {
|
||||
newId = theIdSubstitutions.get(nextId);
|
||||
}
|
||||
ourLog.debug(" * Replacing resource ref {} with {}", nextId, newId);
|
||||
if (referencesToVersion.contains(resourceReference)) {
|
||||
resourceReference.setReference(newId.getValue());
|
||||
|
@ -993,11 +1009,27 @@ public abstract class BaseTransactionProcessor {
|
|||
IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(nextResource.getClass());
|
||||
IJpaDao jpaDao = (IJpaDao) dao;
|
||||
|
||||
IBasePersistedResource updateOutcome = null;
|
||||
if (updatedEntities.contains(nextOutcome.getEntity())) {
|
||||
jpaDao.updateInternal(theRequest, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails);
|
||||
} else if (!nonUpdatedEntities.contains(nextOutcome.getEntity())) {
|
||||
jpaDao.updateEntity(theRequest, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theTransactionDetails, false, true);
|
||||
updateOutcome = jpaDao.updateInternal(theRequest, nextResource, true, false, nextOutcome.getEntity(), nextResource.getIdElement(), nextOutcome.getPreviousResource(), theTransactionDetails);
|
||||
} else if (!nonUpdatedEntities.contains(nextOutcome.getId())) {
|
||||
updateOutcome = jpaDao.updateEntity(theRequest, nextResource, nextOutcome.getEntity(), deletedTimestampOrNull, true, false, theTransactionDetails, false, true);
|
||||
}
|
||||
|
||||
// Make sure we reflect the actual final version for the resource.
|
||||
if (updateOutcome != null) {
|
||||
IIdType newId = updateOutcome.getIdDt();
|
||||
for (IIdType nextEntry : entriesToProcess.values()) {
|
||||
if (nextEntry.getResourceType().equals(newId.getResourceType())) {
|
||||
if (nextEntry.getIdPart().equals(newId.getIdPart())) {
|
||||
if (!nextEntry.hasVersionIdPart() || !nextEntry.getVersionIdPart().equals(newId.getVersionIdPart())) {
|
||||
nextEntry.setParts(nextEntry.getBaseUrl(), nextEntry.getResourceType(), nextEntry.getIdPart(), newId.getVersionIdPart());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
theTransactionStopWatch.endCurrentTask();
|
||||
|
|
|
@ -25,22 +25,26 @@ import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
|||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
|
||||
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.util.StopWatch;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Set;
|
||||
|
||||
@Service
|
||||
|
@@ -52,9 +56,20 @@ public class MatchResourceUrlService {
 	@Autowired
 	private MatchUrlService myMatchUrlService;
 	@Autowired
+	private DaoConfig myDaoConfig;
+	@Autowired
 	private IInterceptorBroadcaster myInterceptorBroadcaster;
+	@Autowired
+	private MemoryCacheService myMemoryCacheService;
 
 	public <R extends IBaseResource> Set<ResourcePersistentId> processMatchUrl(String theMatchUrl, Class<R> theResourceType, RequestDetails theRequest) {
+		if (myDaoConfig.getMatchUrlCache()) {
+			ResourcePersistentId existing = myMemoryCacheService.getIfPresent(MemoryCacheService.CacheEnum.MATCH_URL, theMatchUrl);
+			if (existing != null) {
+				return Collections.singleton(existing);
+			}
+		}
+
 		RuntimeResourceDefinition resourceDef = myContext.getResourceDefinition(theResourceType);
 		SearchParameterMap paramMap = myMatchUrlService.translateMatchUrl(theMatchUrl, resourceDef);
 		if (paramMap.isEmpty() && paramMap.getLastUpdated() == null) {
@@ -62,7 +77,13 @@ public class MatchResourceUrlService {
 		}
 		paramMap.setLoadSynchronous(true);
 
-		return search(paramMap, theResourceType, theRequest);
+		Set<ResourcePersistentId> retVal = search(paramMap, theResourceType, theRequest);
+
+		if (myDaoConfig.getMatchUrlCache() && retVal.size() == 1) {
+			myMemoryCacheService.put(MemoryCacheService.CacheEnum.MATCH_URL, theMatchUrl, retVal.iterator().next());
+		}
+
+		return retVal;
 	}
 
 	public <R extends IBaseResource> Set<ResourcePersistentId> search(SearchParameterMap theParamMap, Class<R> theResourceType, RequestDetails theRequest) {
@ -88,4 +109,11 @@ public class MatchResourceUrlService {
|
|||
}
|
||||
|
||||
|
||||
public void matchUrlResolved(String theMatchUrl, ResourcePersistentId theResourcePersistentId) {
|
||||
Validate.notBlank(theMatchUrl);
|
||||
Validate.notNull(theResourcePersistentId);
|
||||
if (myDaoConfig.getMatchUrlCache()) {
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.MATCH_URL, theMatchUrl, theResourcePersistentId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -91,4 +91,7 @@ public interface IResourceTableDao extends JpaRepository<ResourceTable, Long> {
|
|||
*/
|
||||
@Query("SELECT t.myResourceType, t.myId, t.myDeleted FROM ResourceTable t WHERE t.myId IN (:pid) AND t.myPartitionIdValue IS NULL")
|
||||
Collection<Object[]> findLookupFieldsByResourcePidInPartitionNull(@Param("pid") List<Long> thePids);
|
||||
|
||||
@Query("SELECT t.myVersion FROM ResourceTable t WHERE t.myId = :pid")
|
||||
long findCurrentVersionByPid(@Param("pid") Long thePid);
|
||||
}
|
||||
|
|
|
@ -62,10 +62,8 @@ import org.springframework.data.domain.Pageable;
|
|||
import org.springframework.data.domain.Slice;
|
||||
import org.springframework.data.domain.SliceImpl;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.TransactionManager;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.transaction.support.TransactionSynchronization;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationAdapter;
|
||||
import org.springframework.transaction.support.TransactionSynchronizationManager;
|
||||
|
||||
import java.util.Collections;
|
||||
|
|
|
@@ -136,20 +136,25 @@ public class IdHelperService {
 	public ResourcePersistentId resolveResourcePersistentIds(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
 		Validate.notNull(theId, "theId must not be null");
 
-		Long retVal;
+		ResourcePersistentId retVal;
 		if (myDaoConfig.getResourceClientIdStrategy() == DaoConfig.ClientIdStrategyEnum.ANY || !isValidPid(theId)) {
 			if (myDaoConfig.isDeleteEnabled()) {
-				retVal = resolveResourceIdentity(theRequestPartitionId, theResourceType, theId).getResourceId();
+				retVal = new ResourcePersistentId(resolveResourceIdentity(theRequestPartitionId, theResourceType, theId).getResourceId());
 			} else {
-				String key = RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId;
-				retVal = myMemoryCacheService.get(MemoryCacheService.CacheEnum.PERSISTENT_ID, key, t -> resolveResourceIdentity(theRequestPartitionId, theResourceType, theId).getResourceId());
+				String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theId);
+				retVal = myMemoryCacheService.get(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, t -> new ResourcePersistentId(resolveResourceIdentity(theRequestPartitionId, theResourceType, theId).getResourceId()));
 			}
 
 		} else {
-			retVal = Long.parseLong(theId);
+			retVal = new ResourcePersistentId(Long.parseLong(theId));
 		}
 
-		return new ResourcePersistentId(retVal);
+		return retVal;
 	}
 
+	@Nonnull
+	private String toForcedIdToPidKey(@Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, String theId) {
+		return RequestPartitionId.stringifyForKey(theRequestPartitionId) + "/" + theResourceType + "/" + theId;
+	}
+
 	/**
@ -237,7 +242,7 @@ public class IdHelperService {
|
|||
public IIdType translatePidIdToForcedId(FhirContext theCtx, String theResourceType, ResourcePersistentId theId) {
|
||||
IIdType retVal = theCtx.getVersion().newIdType();
|
||||
|
||||
Optional<String> forcedId = translatePidIdToForcedId(theId);
|
||||
Optional<String> forcedId = translatePidIdToForcedIdWithCache(theId);
|
||||
if (forcedId.isPresent()) {
|
||||
retVal.setValue(theResourceType + '/' + forcedId.get());
|
||||
} else {
|
||||
|
@ -248,8 +253,8 @@ public class IdHelperService {
|
|||
}
|
||||
|
||||
|
||||
public Optional<String> translatePidIdToForcedId(ResourcePersistentId theId) {
|
||||
return myMemoryCacheService.get(MemoryCacheService.CacheEnum.FORCED_ID, theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(t -> t.getForcedId()));
|
||||
public Optional<String> translatePidIdToForcedIdWithCache(ResourcePersistentId theId) {
|
||||
return myMemoryCacheService.get(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theId.getIdAsLong(), pid -> myForcedIdDao.findByResourcePid(pid).map(t -> t.getForcedId()));
|
||||
}
|
||||
|
||||
private ListMultimap<String, String> organizeIdsByResourceType(Collection<IIdType> theIds) {
|
||||
|
@ -404,7 +409,7 @@ public class IdHelperService {
|
|||
|
||||
}
|
||||
public Map<Long, Optional<String>> translatePidsToForcedIds(Set<Long> thePids) {
|
||||
Map<Long, Optional<String>> retVal = new HashMap<>(myMemoryCacheService.getAllPresent(MemoryCacheService.CacheEnum.FORCED_ID, thePids));
|
||||
Map<Long, Optional<String>> retVal = new HashMap<>(myMemoryCacheService.getAllPresent(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, thePids));
|
||||
|
||||
List<Long> remainingPids = thePids
|
||||
.stream()
|
||||
|
@ -418,7 +423,7 @@ public class IdHelperService {
|
|||
Long nextResourcePid = forcedId.getResourceId();
|
||||
Optional<String> nextForcedId = Optional.of(forcedId.getForcedId());
|
||||
retVal.put(nextResourcePid, nextForcedId);
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.FORCED_ID, nextResourcePid, nextForcedId);
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, nextForcedId);
|
||||
}
|
||||
});
|
||||
|
||||
|
@ -428,7 +433,7 @@ public class IdHelperService {
|
|||
.collect(Collectors.toList());
|
||||
for (Long nextResourcePid : remainingPids) {
|
||||
retVal.put(nextResourcePid, Optional.empty());
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.FORCED_ID, nextResourcePid, Optional.empty());
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, nextResourcePid, Optional.empty());
|
||||
}
|
||||
|
||||
return retVal;
|
||||
|
@ -481,6 +486,19 @@ public class IdHelperService {
|
|||
return optionalResource.get().getIdDt().toVersionless();
|
||||
}
|
||||
|
||||
/**
|
||||
* Pre-cache a PID-to-Resource-ID mapping for later retrieval by {@link #translatePidsToForcedIds(Set)} and related methods
|
||||
*/
|
||||
public void addResolvedPidToForcedId(ResourcePersistentId theResourcePersistentId, @Nonnull RequestPartitionId theRequestPartitionId, String theResourceType, @Nullable String theForcedId) {
|
||||
if (theForcedId != null) {
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theResourcePersistentId.getIdAsLong(), Optional.of(theForcedId));
|
||||
String key = toForcedIdToPidKey(theRequestPartitionId, theResourceType, theForcedId);
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.FORCED_ID_TO_PID, key, theResourcePersistentId);
|
||||
}else {
|
||||
myMemoryCacheService.put(MemoryCacheService.CacheEnum.PID_TO_FORCED_ID, theResourcePersistentId.getIdAsLong(), Optional.empty());
|
||||
}
|
||||
}
|
||||
|
||||
public static boolean isValidPid(IIdType theId) {
|
||||
if (theId == null) {
|
||||
return false;
|
||||
|
|
|
@@ -362,6 +362,10 @@ public class SearchBuilder implements ISearchBuilder {
 		SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCount);
 		QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
 
+		if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS)) {
+			sqlBuilder.setNeedResourceTableRoot(true);
+		}
+
 		JdbcTemplate jdbcTemplate = new JdbcTemplate(myEntityManagerFactory.getDataSource());
 		jdbcTemplate.setFetchSize(myFetchSize);
 		if (theMaximumResults != null) {
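To illustrate when the new flag is set, consider two searches. This is a hedged sketch: `TokenParam` and `SortSpec` are existing HAPI FHIR classes, but the parameter names and values are made up for illustration.

```java
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.param.TokenParam;

public class SearchShapeExamples {
	public static void main(String[] args) {
		// A single-parameter search: setNeedResourceTableRoot() is left unset, so the
		// generated SQL can be rooted directly on the index table, dropping one JOIN.
		SearchParameterMap singleParam = new SearchParameterMap();
		singleParam.add("code", new TokenParam("http://loinc.org", "29463-7"));

		// More than one parameter (or a sort, or _has) still forces HFJ_RESOURCE to the
		// root of the query, matching the condition in the SearchBuilder change above.
		SearchParameterMap multiParam = new SearchParameterMap();
		multiParam.add("code", new TokenParam("http://loinc.org", "29463-7"));
		multiParam.add("status", new TokenParam("final"));
		multiParam.setSort(new SortSpec("date"));
	}
}
```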
@ -101,12 +101,13 @@ public class SearchQueryBuilder {
private BaseJoiningPredicateBuilder myFirstPredicateBuilder;
private boolean dialectIsMsSql;
private boolean dialectIsMySql;
private boolean myNeedResourceTableRoot;

/**
* Constructor
*/
public SearchQueryBuilder(FhirContext theFhirContext, ModelConfig theModelConfig, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernatePropertiesProvider theDialectProvider, boolean theCountQuery) {
this(theFhirContext, theModelConfig, thePartitionSettings, theRequestPartitionId, theResourceType, theSqlBuilderFactory, UUID.randomUUID().toString() + "-", theDialectProvider.getDialect(), theCountQuery, new ArrayList<>());
this(theFhirContext, theModelConfig, thePartitionSettings, theRequestPartitionId, theResourceType, theSqlBuilderFactory, UUID.randomUUID() + "-", theDialectProvider.getDialect(), theCountQuery, new ArrayList<>());
}

/**

@ -310,12 +311,17 @@ public class SearchQueryBuilder {
addJoin(fromTable, toTable, theSourceJoinColumn, toColumn);
} else {
if (myFirstPredicateBuilder == null) {
ResourceTablePredicateBuilder root;
BaseJoiningPredicateBuilder root;
if (!myNeedResourceTableRoot) {
root = thePredicateBuilder;
} else {
if (thePredicateBuilder instanceof ResourceTablePredicateBuilder) {
root = (ResourceTablePredicateBuilder) thePredicateBuilder;
root = thePredicateBuilder;
} else {
root = mySqlBuilderFactory.resourceTable(this);
}
}

if (myCountQuery) {
mySelect.addCustomColumns(FunctionCall.count().setIsDistinct(true).addColumnParams(root.getResourceIdColumn()));

@ -325,7 +331,7 @@ public class SearchQueryBuilder {
mySelect.addFromTable(root.getTable());
myFirstPredicateBuilder = root;

if (thePredicateBuilder instanceof ResourceTablePredicateBuilder) {
if (!myNeedResourceTableRoot || (thePredicateBuilder instanceof ResourceTablePredicateBuilder)) {
return;
}
}

@ -680,4 +686,15 @@ public class SearchQueryBuilder {
mySelect.addCustomOrderings(orderObject);
}

/**
* If set to true (default is false), force the generated SQL to start
* with the {@link ca.uhn.fhir.jpa.model.entity.ResourceTable HFJ_RESOURCE}
* table at the root of the query.
*
* This seems to perform better if there are multiple joins on the
* resource ID table.
*/
public void setNeedResourceTableRoot(boolean theNeedResourceTableRoot) {
myNeedResourceTableRoot = theNeedResourceTableRoot;
}
}
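Editor's note: for reference, these are the two query shapes the myNeedResourceTableRoot flag toggles. The SQL strings are copied verbatim from the assertions in the new FhirResourceDaoR4SearchSqlTest added later in this commit; the holder class below is just an illustration, not part of the change.

public final class SearchSqlShapes {

	// Single search parameter, myNeedResourceTableRoot == false:
	// the index table alone drives the query, no join required.
	public static final String SINGLE_PARAM =
		"SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 " +
			"WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))";

	// Two search parameters, myNeedResourceTableRoot == true:
	// HFJ_RESOURCE is the root and each index table joins to it exactly once.
	public static final String TWO_PARAMS =
		"SELECT t1.RES_ID FROM HFJ_RESOURCE t1 " +
			"LEFT OUTER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) " +
			"LEFT OUTER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) " +
			"WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?))";

	private SearchSqlShapes() {
		// Non-instantiable
	}
}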
@ -21,15 +21,16 @@ package ca.uhn.fhir.jpa.util;
*/

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.model.TranslationQuery;
import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine;
import org.apache.commons.lang3.tuple.Pair;
import org.hl7.fhir.instance.model.api.IIdType;
import org.springframework.beans.factory.annotation.Autowired;

import javax.annotation.PostConstruct;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

@ -62,7 +63,7 @@ public class MemoryCacheService {
case TAG_DEFINITION:
case PERSISTENT_ID:
case RESOURCE_LOOKUP:
case FORCED_ID:
case PID_TO_FORCED_ID:
default:
timeoutSeconds = 60;
break;

@ -76,15 +77,18 @@ public class MemoryCacheService {

public <K, T> T get(CacheEnum theCache, K theKey, Function<K, T> theSupplier) {
assert theCache.myKeyType.isAssignableFrom(theKey.getClass());
Cache<K, T> cache = getCache(theCache);
return cache.get(theKey, theSupplier);
}

public <K, V> V getIfPresent(CacheEnum theCache, K theKey) {
assert theCache.myKeyType.isAssignableFrom(theKey.getClass());
return (V) getCache(theCache).getIfPresent(theKey);
}

public <K, V> void put(CacheEnum theCache, K theKey, V theValue) {
assert theCache.myKeyType.isAssignableFrom(theKey.getClass());
getCache(theCache).put(theKey, theValue);
}

@ -102,13 +106,21 @@ public class MemoryCacheService {

public enum CacheEnum {

TAG_DEFINITION,
PERSISTENT_ID,
RESOURCE_LOOKUP,
FORCED_ID,
CONCEPT_TRANSLATION,
CONCEPT_TRANSLATION_REVERSE
TAG_DEFINITION(Pair.class),
PERSISTENT_ID(String.class),
RESOURCE_LOOKUP(String.class),
FORCED_ID_TO_PID(String.class),
PID_TO_FORCED_ID(Long.class),
CONCEPT_TRANSLATION(TranslationQuery.class),
MATCH_URL(String.class),
CONCEPT_TRANSLATION_REVERSE(TranslationQuery.class),
RESOURCE_CONDITIONAL_CREATE_VERSION(IIdType.class);

private final Class<?> myKeyType;

CacheEnum(Class<?> theKeyType) {
myKeyType = theKeyType;
}
}
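Editor's note: a hypothetical consumer sketch (not HAPI code) of the typed-key cache API above. It assumes the new MATCH_URL cache maps a conditional URL to its resolved persistent ID and that a MemoryCacheService instance is injected; lookupFromDatabase() is a placeholder.

import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;

public class MatchUrlCacheConsumerSketch {

	private final MemoryCacheService myMemoryCacheService;

	public MatchUrlCacheConsumerSketch(MemoryCacheService theMemoryCacheService) {
		myMemoryCacheService = theMemoryCacheService;
	}

	public ResourcePersistentId resolveMatchUrl(String theMatchUrl) {
		// MATCH_URL keys are Strings, so the key-type assert in get() passes;
		// the loader function only runs on a cache miss.
		return myMemoryCacheService.get(
			MemoryCacheService.CacheEnum.MATCH_URL,
			theMatchUrl,
			url -> lookupFromDatabase(url));
	}

	private ResourcePersistentId lookupFromDatabase(String theMatchUrl) {
		// Placeholder for the real conditional-URL search
		return new ResourcePersistentId(1L);
	}
}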
@ -348,7 +348,7 @@ public class FhirResourceDaoCreatePlaceholdersR4Test extends BaseJpaR4Test {

Observation createdObs = myObservationDao.read(id);
ourLog.info(myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(createdObs));
assertEquals("Patient/ABC", obsToCreate.getSubject().getReference());
assertEquals("Patient/ABC", createdObs.getSubject().getReference());

}
@ -1,11 +1,13 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.SqlQuery;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.util.BundleBuilder;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;

@ -25,11 +27,15 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import java.util.List;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.when;

public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4QueryCountTest.class);

@ -39,6 +45,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
myDaoConfig.setResourceMetaCountHardLimit(new DaoConfig().getResourceMetaCountHardLimit());
myDaoConfig.setIndexMissingFields(new DaoConfig().getIndexMissingFields());
myDaoConfig.setDeleteEnabled(new DaoConfig().isDeleteEnabled());
myDaoConfig.setMatchUrlCache(new DaoConfig().getMatchUrlCache());
}

@BeforeEach
@ -217,6 +224,80 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

@Test
public void testCreateWithClientAssignedId_CheckDisabledMode() {
when(mySrd.getHeader(eq(JpaConstants.HEADER_UPSERT_EXISTENCE_CHECK))).thenReturn(JpaConstants.HEADER_UPSERT_EXISTENCE_CHECK_DISABLED);

myCaptureQueriesListener.clear();
runInTransaction(() -> {
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("123");
return myPatientDao.update(p, mySrd).getId().toUnqualified();
});

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(4, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

@Test
public void testUpdateWithClientAssignedId_DeletesDisabled() {
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
myDaoConfig.setDeleteEnabled(false);

runInTransaction(() -> {
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("123");
myPatientDao.update(p).getId().toUnqualified();
});

// Second time

myCaptureQueriesListener.clear();
runInTransaction(() -> {
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("456");
myPatientDao.update(p).getId().toUnqualified();
});

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());

// Third time (caches all loaded by now)

myCaptureQueriesListener.clear();
runInTransaction(() -> {
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("789");
myPatientDao.update(p).getId().toUnqualified();
});

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();
assertEquals(1, myCaptureQueriesListener.getInsertQueriesForCurrentThread().size());
myCaptureQueriesListener.logDeleteQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

@Test
public void testReferenceToForcedId() {
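Editor's note: the tests above exercise DaoConfig settings that trade safety checks for fewer queries per write. A minimal configuration sketch combining them is shown below, using only setters that appear in this commit; treat the combination as what the tests exercise, not a general recommendation.

import ca.uhn.fhir.jpa.api.config.DaoConfig;

public class DaoConfigSketch {

	public static DaoConfig lowQueryCountConfig() {
		DaoConfig daoConfig = new DaoConfig();
		// Skip the extra work of indexing missing fields
		daoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
		// With deletes disabled, resolved IDs can be cached more aggressively
		// because resources cannot disappear between requests
		daoConfig.setDeleteEnabled(false);
		// Cache conditional-create match URLs (backed by the new MATCH_URL cache)
		daoConfig.setMatchUrlCache(true);
		return daoConfig;
	}
}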
@ -343,7 +424,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
// Perform count, Search history table, resolve forced IDs
assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertNoPartitionSelectors();
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());

@ -405,7 +486,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
});
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
// Perform count, Search history table, resolve forced IDs, load tags (x3)
assertEquals(6, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(5, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
myCaptureQueriesListener.logUpdateQueriesForCurrentThread();
assertEquals(0, myCaptureQueriesListener.getUpdateQueriesForCurrentThread().size());
myCaptureQueriesListener.logInsertQueriesForCurrentThread();

@ -536,7 +617,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true).toLowerCase();
assertEquals(2, StringUtils.countMatches(sql, "join"), sql);
assertEquals(1, StringUtils.countMatches(sql, "join"), sql);
}
@ -587,6 +668,129 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {
assertEquals(0, myCaptureQueriesListener.getDeleteQueriesForCurrentThread().size());
}

@Test
public void testTransactionWithConditionalCreate_MatchUrlCacheEnabled() {
myDaoConfig.setMatchUrlCache(true);

Supplier<Bundle> bundleCreator = () -> {
BundleBuilder bb = new BundleBuilder(myFhirCtx);

Patient pt = new Patient();
pt.setId(IdType.newRandomUuid());
pt.addIdentifier().setSystem("http://foo").setValue("123");
bb.addTransactionCreateEntry(pt).conditional("Patient?identifier=http://foo|123");

Observation obs = new Observation();
obs.setId(IdType.newRandomUuid());
obs.setSubject(new Reference(pt.getId()));
bb.addTransactionCreateEntry(obs);

return (Bundle) bb.getBundle();
};

// Run once (creates both)

myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, bundleCreator.get());
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

runInTransaction(() -> {
List<String> types = myResourceTableDao.findAll().stream().map(t -> t.getResourceType()).collect(Collectors.toList());
assertThat(types, containsInAnyOrder("Patient", "Observation"));
});

// Run a second time (creates a new observation, reuses the patient, should use cache)

myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, bundleCreator.get());
myCaptureQueriesListener.logSelectQueries();
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

runInTransaction(() -> {
List<String> types = myResourceTableDao.findAll().stream().map(t -> t.getResourceType()).collect(Collectors.toList());
assertThat(types, containsInAnyOrder("Patient", "Observation", "Observation"));
});

// Run a third time (creates a new observation, reuses the patient, should use cache)

myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, bundleCreator.get());
assertEquals(0, myCaptureQueriesListener.countSelectQueries());
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

runInTransaction(() -> {
List<String> types = myResourceTableDao.findAll().stream().map(t -> t.getResourceType()).collect(Collectors.toList());
assertThat(types, containsInAnyOrder("Patient", "Observation", "Observation", "Observation"));
});

}

@Test
public void testTransactionWithConditionalCreate_MatchUrlCacheNotEnabled() {

Supplier<Bundle> bundleCreator = () -> {
BundleBuilder bb = new BundleBuilder(myFhirCtx);

Patient pt = new Patient();
pt.setId(IdType.newRandomUuid());
pt.addIdentifier().setSystem("http://foo").setValue("123");
bb.addTransactionCreateEntry(pt).conditional("Patient?identifier=http://foo|123");

Observation obs = new Observation();
obs.setId(IdType.newRandomUuid());
obs.setSubject(new Reference(pt.getId()));
bb.addTransactionCreateEntry(obs);

return (Bundle) bb.getBundle();
};

// Run once (creates both)

myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, bundleCreator.get());
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertEquals(5, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

runInTransaction(() -> {
List<String> types = myResourceTableDao.findAll().stream().map(t -> t.getResourceType()).collect(Collectors.toList());
assertThat(types, containsInAnyOrder("Patient", "Observation"));
});

// Run a second time (creates a new observation, reuses the patient, should use cache)

myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, bundleCreator.get());
myCaptureQueriesListener.logSelectQueries();
assertEquals(2, myCaptureQueriesListener.countSelectQueries());
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

runInTransaction(() -> {
List<String> types = myResourceTableDao.findAll().stream().map(t -> t.getResourceType()).collect(Collectors.toList());
assertThat(types, containsInAnyOrder("Patient", "Observation", "Observation"));
});

// Run a third time (creates a new observation, reuses the patient, should use cache)

myCaptureQueriesListener.clear();
mySystemDao.transaction(mySrd, bundleCreator.get());
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
assertEquals(3, myCaptureQueriesListener.countInsertQueries());
assertEquals(0, myCaptureQueriesListener.countDeleteQueries());

runInTransaction(() -> {
List<String> types = myResourceTableDao.findAll().stream().map(t -> t.getResourceType()).collect(Collectors.toList());
assertThat(types, containsInAnyOrder("Patient", "Observation", "Observation", "Observation"));
});

}

@Test
public void testTransactionWithMultipleReferences() {
Bundle input = new Bundle();

@ -1087,7 +1291,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {

// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(4, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());

@ -1146,7 +1350,7 @@ public class FhirResourceDaoR4QueryCountTest extends BaseJpaR4Test {

// Lookup the two existing IDs to make sure they are legit
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(6, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countSelectQueriesForCurrentThread());
assertEquals(3, myCaptureQueriesListener.countInsertQueriesForCurrentThread());
assertEquals(2, myCaptureQueriesListener.countUpdateQueriesForCurrentThread());
assertEquals(0, myCaptureQueriesListener.countDeleteQueriesForCurrentThread());
@ -857,7 +857,8 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
foundResources = toUnqualifiedVersionlessIdValues(results);
assertThat(foundResources, contains(p2id.getValue()));
sql = myCaptureQueriesListener.logSelectQueriesForCurrentThread(0);
assertThat(sql, countMatches(sql, "JOIN"), equalTo(1));
assertThat(sql, countMatches(sql, "JOIN"), equalTo(0));
assertThat(sql, countMatches(sql, "SELECT"), equalTo(1));
assertThat(sql, countMatches(sql, "t0.SRC_PATH = 'Patient.extension('http://acme.org/sibling')'"), equalTo(1));
assertThat(sql, countMatches(sql, "t0.TARGET_RESOURCE_ID = '"), equalTo(1));

@ -869,7 +870,7 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
foundResources = toUnqualifiedVersionlessIdValues(results);
assertThat(foundResources, contains(p2id.getValue()));
sql = myCaptureQueriesListener.logSelectQueriesForCurrentThread(0);
assertThat(sql, countMatches(sql, "JOIN"), equalTo(2));
assertThat(sql, countMatches(sql, "JOIN"), equalTo(1));
assertThat(sql, countMatches(sql, "SRC_PATH = 'Patient.extension('http://acme.org/sibling')'"), equalTo(1));
assertThat(sql, countMatches(sql, "HASH_NORM_PREFIX = '"), equalTo(39));
assertThat(sql, countMatches(sql, "SP_VALUE_NORMALIZED LIKE "), equalTo(39));

@ -882,7 +883,7 @@ public class FhirResourceDaoR4SearchCustomSearchParamTest extends BaseJpaR4Test
foundResources = toUnqualifiedVersionlessIdValues(results);
assertThat(foundResources, containsInAnyOrder(appid.getValue()));
sql = myCaptureQueriesListener.logSelectQueriesForCurrentThread(0);
assertThat(sql, countMatches(sql, "JOIN"), equalTo(3));
assertThat(sql, countMatches(sql, "JOIN"), equalTo(2));
assertThat(sql, countMatches(sql, "SRC_PATH = 'Appointment.participant.actor.where(resolve() is Patient)'"), equalTo(1));
assertThat(sql, countMatches(sql, "SRC_PATH = 'Patient.extension('http://acme.org/sibling')'"), equalTo(1));
assertThat(sql, countMatches(sql, "SP_VALUE_NORMALIZED LIKE 'P1%'"), equalTo(39));
@ -3697,7 +3697,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {

String searchQuery = queries.get(0);
assertEquals(1, countMatches(searchQuery.toUpperCase(), "HFJ_SPIDX_TOKEN"), searchQuery);
assertEquals(2, countMatches(searchQuery.toUpperCase(), "LEFT OUTER JOIN"), searchQuery);
assertEquals(1, countMatches(searchQuery.toUpperCase(), "LEFT OUTER JOIN"), searchQuery);
assertEquals(2, countMatches(searchQuery.toUpperCase(), "RES_UPDATED"), searchQuery);
}

@ -4119,7 +4119,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
assertThat(toUnqualifiedVersionlessIdValues(found).toString(), toUnqualifiedVersionlessIdValues(found), contains(id1));

String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "join"), searchQuery);
assertEquals(0, countMatches(searchQuery.toLowerCase(), "join"), searchQuery);
assertEquals(0, countMatches(searchQuery.toLowerCase(), "partition"), searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "hash_identity"), searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "sp_value"), searchQuery);

@ -4405,7 +4405,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
assertThat(patients.toString(), patients, contains(obsId1));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "join"), searchQuery);
assertEquals(0, countMatches(searchQuery.toLowerCase(), "join"), searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "t0.sp_value_high_date_ordinal >= '20200605'"), searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "t0.sp_value_low_date_ordinal <= '20200606'"), searchQuery);
}

@ -4440,7 +4440,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
assertThat(patients.toString(), patients, containsInAnyOrder(obsId3, obsId4));
String searchQuery = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search query:\n{}", searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "join"), searchQuery);
assertEquals(0, countMatches(searchQuery.toLowerCase(), "join"), searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "hash_identity"), searchQuery);
assertEquals(1, countMatches(searchQuery.toLowerCase(), "sp_value_low"), searchQuery);
}

@ -5465,7 +5465,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
assertEquals(1, outcome.sizeOrThrowNpe());

String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
assertEquals(4, countMatches(searchSql, "JOIN"));
assertEquals(3, countMatches(searchSql, "JOIN"));
assertEquals(1, countMatches(searchSql, "SELECT"));

}
@ -790,7 +790,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {

String selectQuery = myCaptureQueriesListener.getSelectQueries().get(0).getSql(true, true);
ourLog.info(selectQuery);
assertEquals(2, StringUtils.countMatches(selectQuery, "JOIN"));
assertEquals(1, StringUtils.countMatches(selectQuery, "JOIN"));
assertEquals(1, StringUtils.countMatches(selectQuery, "SELECT"));
}
@ -0,0 +1,47 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;

public class FhirResourceDaoR4SearchSqlTest extends BaseJpaR4Test {

/**
* One regular search param - Doesn't need HFJ_RESOURCE as root
*/
@Test
public void testSingleRegularSearchParam() {

myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous(Patient.SP_NAME, new StringParam("FOO"));
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t0.RES_ID FROM HFJ_SPIDX_STRING t0 WHERE ((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?))", sql);

}

/**
* Two regular search params - Should use HFJ_RESOURCE as root
*/
@Test
public void testTwoRegularSearchParams() {

myCaptureQueriesListener.clear();
SearchParameterMap map = SearchParameterMap.newSynchronous()
.add(Patient.SP_NAME, new StringParam("FOO"))
.add(Patient.SP_GENDER, new TokenParam("a", "b"));
myPatientDao.search(map);
assertEquals(1, myCaptureQueriesListener.countSelectQueries());
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(false, false);
assertEquals("SELECT t1.RES_ID FROM HFJ_RESOURCE t1 LEFT OUTER JOIN HFJ_SPIDX_STRING t0 ON (t1.RES_ID = t0.RES_ID) LEFT OUTER JOIN HFJ_SPIDX_TOKEN t2 ON (t1.RES_ID = t2.RES_ID) WHERE (((t0.HASH_NORM_PREFIX = ?) AND (t0.SP_VALUE_NORMALIZED LIKE ?)) AND (t2.HASH_SYS_AND_VALUE = ?))", sql);

}

}
@ -1,21 +1,25 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.model.primitive.InstantDt;
import ca.uhn.fhir.rest.api.MethodOutcome;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.ResourceGoneException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.CanonicalType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.Coding;

@ -23,14 +27,15 @@ import org.hl7.fhir.r4.model.ContactPoint;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.InstantType;
import org.hl7.fhir.r4.model.Meta;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Organization;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Reference;
import org.hl7.fhir.r4.model.Resource;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

import java.util.ArrayList;
import java.util.Date;

@ -39,6 +44,7 @@ import java.util.List;
import java.util.Set;
import java.util.TimeZone;
import java.util.UUID;
import java.util.function.Supplier;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;

@ -53,7 +59,9 @@ import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;

public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4UpdateTest.class);

@ -71,6 +79,30 @@ public class FhirResourceDaoR4UpdateTest extends BaseJpaR4Test {
myInterceptorRegistry.registerInterceptor(myInterceptor);
}

@Test
public void testCreateWithClientAssignedId_CheckDisabledMode_AlreadyExists() {
when(mySrd.getHeader(eq(JpaConstants.HEADER_UPSERT_EXISTENCE_CHECK))).thenReturn(JpaConstants.HEADER_UPSERT_EXISTENCE_CHECK_DISABLED);

runInTransaction(() -> {
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("123");
return myPatientDao.update(p, mySrd).getId().toUnqualified();
});
try {
runInTransaction(() -> {
Patient p = new Patient();
p.setId("AAA");
p.getMaritalStatus().setText("123");
return myPatientDao.update(p, mySrd).getId().toUnqualified();
});
fail();
} catch (ResourceVersionConflictException e) {
assertThat(e.getMessage(), containsString("It can also happen when a request disables the Upsert Existence Check."));
}
}

@Test
public void testCreateAndUpdateWithoutRequest() {
String methodName = "testUpdateByUrl";
@ -4,6 +4,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.util.BundleBuilder;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@ -183,6 +184,9 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
myEncounterDao.create(encounter);
}

// Verify Patient Version
assertEquals("2", myPatientDao.search(SearchParameterMap.newSynchronous("active", new TokenParam("false"))).getResources(0,1).get(0).getIdElement().getVersionIdPart());

BundleBuilder builder = new BundleBuilder(myFhirCtx);

Patient patient = new Patient();

@ -215,6 +219,7 @@ public class FhirResourceDaoR4VersionedReferenceTest extends BaseJpaR4Test {
// Read back and verify that reference is now versioned
observation = myObservationDao.read(observationId);
assertEquals(patientId.getValue(), observation.getSubject().getReference());
assertEquals("2", observation.getSubject().getReferenceElement().getVersionIdPart());
assertEquals(encounterId.toVersionless().getValue(), observation.getEncounter().getReference());

}
@ -71,6 +71,7 @@ import org.springframework.transaction.support.TransactionCallback;
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;

@ -85,6 +86,7 @@ import java.util.stream.Collectors;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyString;

@ -1119,6 +1121,23 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
assertThat(respEntry.getResponse().getLocation(), endsWith("/_history/1"));
assertEquals("1", respEntry.getResponse().getEtag());

/*
* Third time should not update
*/

request = new Bundle();
o = new Observation();
o.getCode().setText("Some Observation");
request.addEntry().setResource(o).getRequest().setMethod(HTTPVerb.POST).setIfNoneExist("Observation?_lastUpdated=gt2011-01-01");
resp = mySystemDao.transaction(mySrd, request);
assertEquals(1, resp.getEntry().size());

respEntry = resp.getEntry().get(0);
assertEquals(Constants.STATUS_HTTP_200_OK + " OK", respEntry.getResponse().getStatus());
assertThat(respEntry.getResponse().getLocation(), containsString("Observation/"));
assertThat(respEntry.getResponse().getLocation(), endsWith("/_history/1"));
assertEquals("1", respEntry.getResponse().getEtag());

}

@Test

@ -2580,6 +2599,56 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {

}

/**
* DAOs can't handle references where <code>Reference.setResource</code>
* is set but not <code>Reference.setReference</code> so make sure
* we block this so it doesn't get used accidentally.
*/
@Test
public void testTransactionWithResourceReferenceInsteadOfIdReferenceBlocked() {

Bundle input = createBundleWithConditionalCreateReferenceByResource();
mySystemDao.transaction(mySrd, input);

// Fails the second time
try {
input = createBundleWithConditionalCreateReferenceByResource();
mySystemDao.transaction(mySrd, input);
fail();
} catch (InternalErrorException e) {
assertEquals("References by resource with no reference ID are not supported in DAO layer", e.getMessage());
}

}

@Nonnull
private Bundle createBundleWithConditionalCreateReferenceByResource() {
Bundle input = new Bundle();
input.setType(BundleType.TRANSACTION);

Patient p = new Patient();
p.setId(IdType.newRandomUuid());
p.addIdentifier().setSystem("foo").setValue("bar");
input.addEntry()
.setFullUrl(p.getId())
.setResource(p)
.getRequest()
.setMethod(HTTPVerb.POST)
.setUrl("Patient")
.setIfNoneExist("Patient?identifier=foo|bar");

Observation o1 = new Observation();
o1.setId(IdType.newRandomUuid());
o1.setStatus(ObservationStatus.FINAL);
o1.getSubject().setResource(p); // Not allowed
input.addEntry()
.setFullUrl(o1.getId())
.setResource(o1)
.getRequest()
.setMethod(HTTPVerb.POST)
.setUrl("Observation");
return input;
}

@Test
public void testDeleteInTransactionShouldFailWhenReferencesExist() {

@ -2663,7 +2732,7 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {
Observation o = new Observation();
o.setId(IdType.newRandomUuid());
o.setStatus(ObservationStatus.FINAL);
o.getSubject().setResource(p);
o.getSubject().setReference(p.getId());
b.addEntry()
.setFullUrl(o.getId())
.setResource(o)

@ -4037,4 +4106,5 @@ public class FhirSystemDaoR4Test extends BaseJpaR4SystemTest {

}

}
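Editor's note: the hunk at @ -2663 shows the supported pattern the blocked case above is contrasted with: reference the conditionally-created resource by its placeholder UUID rather than only attaching the resource object. A small self-contained sketch of that pattern follows; it simply mirrors the code in the diff and is not part of the change itself.

import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;

public class TransactionReferenceSketch {

	public static Bundle build() {
		Bundle input = new Bundle();
		input.setType(Bundle.BundleType.TRANSACTION);

		Patient p = new Patient();
		p.setId(IdType.newRandomUuid());
		p.addIdentifier().setSystem("foo").setValue("bar");
		input.addEntry()
			.setFullUrl(p.getId())
			.setResource(p)
			.getRequest()
			.setMethod(Bundle.HTTPVerb.POST)
			.setUrl("Patient")
			.setIfNoneExist("Patient?identifier=foo|bar");

		Observation o = new Observation();
		o.setStatus(Observation.ObservationStatus.FINAL);
		// Supported: reference the placeholder ID so the server can rewrite it
		// to the real ID (or the matched ID) when the transaction is processed
		o.getSubject().setReference(p.getId());
		input.addEntry()
			.setResource(o)
			.getRequest()
			.setMethod(Bundle.HTTPVerb.POST)
			.setUrl("Observation");

		return input;
	}
}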
@ -2631,7 +2631,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
List<String> ids = toUnqualifiedIdValues(results);
assertThat(ids, contains(id.withVersion("2").getValue(), id.withVersion("1").getValue()));

assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());

// Resolve resource
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);

@ -2648,12 +2648,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("SQL:{}", searchSql);
assertEquals(0, countMatches(searchSql, "PARTITION_ID="), searchSql.replace(" ", "").toUpperCase());
assertEquals(0, countMatches(searchSql, "PARTITION_IDIN"), searchSql.replace(" ", "").toUpperCase());

// Fetch history resource
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(3).getSql(true, true);
ourLog.info("SQL:{}", searchSql);
assertEquals(0, countMatches(searchSql, "PARTITION_ID="), searchSql.replace(" ", "").toUpperCase());
assertEquals(0, countMatches(searchSql, "PARTITION_IDIN"), searchSql.replace(" ", "").toUpperCase());
}

@Test

@ -2694,9 +2688,9 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
List<String> ids = toUnqualifiedIdValues(results);
assertThat(ids, contains(id.withVersion("2").getValue(), id.withVersion("1").getValue()));

assertEquals(4, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());

// Resolve resource
// Fetch history resource
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("SQL:{}", sql);
assertEquals(0, countMatches(sql, "PARTITION_ID="));

@ -2710,11 +2704,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(true, true);
ourLog.info("SQL:{}", sql);
assertEquals(0, countMatches(sql, "PARTITION_ID="));

// Fetch history resource
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(3).getSql(true, true);
ourLog.info("SQL:{}", sql);
assertEquals(0, countMatches(sql, "PARTITION_ID="));
}

@Test

@ -2764,7 +2753,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
List<String> ids = toUnqualifiedIdValues(results);
assertThat(ids, contains(id1B.withVersion("1").getValue(), id1A.withVersion("1").getValue()));

assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());

// Count
ourLog.info("SQL:{}", myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true));

@ -2777,12 +2766,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
ourLog.info("SQL:{}", sql);
assertEquals(1, countMatches(sql, "PARTITION_ID IN ('1')"), sql);

// Fetch history resource
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(true, false);
sql = sql.replace(" ", "").toUpperCase();
ourLog.info("SQL:{}", sql);
assertEquals(0, countMatches(sql, "PARTITION_ID="), sql);
assertEquals(0, countMatches(sql, "PARTITION_IDIN"), sql);
}

@Test

@ -2802,20 +2785,15 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
List<String> ids = toUnqualifiedIdValues(results);
assertThat(ids, contains(id1B.withVersion("1").getValue(), id1A.withVersion("1").getValue()));

assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());

// Count
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("SQL:{}", searchSql);
assertEquals(1, countMatches(searchSql, "PARTITION_ID is null"), searchSql);

// Fetch history
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true);
ourLog.info("SQL:{}", searchSql);
assertEquals(1, countMatches(searchSql, "PARTITION_ID is null"), searchSql);

// Fetch history resource
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(true, true);
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true);
ourLog.info("SQL:{}", searchSql);
assertEquals(0, countMatches(searchSql, "PARTITION_ID="), searchSql.replace(" ", "").toUpperCase());
assertEquals(0, countMatches(searchSql, "PARTITION_IDIN"), searchSql.replace(" ", "").toUpperCase());

@ -2885,7 +2863,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
List<String> ids = toUnqualifiedIdValues(results);
assertThat(ids, contains(id1B.withVersion("1").getValue(), id1A.withVersion("1").getValue()));

assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());

// Count
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, false).toUpperCase();

@ -2893,15 +2871,10 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertEquals(1, countMatches(sql, "COUNT("), sql);
assertEquals(1, countMatches(sql, "PARTITION_ID IN ('1')"), sql);

// Fetch history resources
// History
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, false).toUpperCase();
ourLog.info("SQL:{}", sql);
assertEquals(1, countMatches(sql, "PARTITION_ID IN ('1')"), sql);

// Resolve forced ID
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(true, false).toUpperCase();
ourLog.info("SQL:{}", sql);
assertEquals(0, countMatches(sql, "PARTITION_ID IN ('1')"), sql);
}

@ -2923,7 +2896,7 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
assertThat(ids, contains(id1B.withVersion("1").getValue(), id1A.withVersion("1").getValue()));

myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertEquals(3, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());
assertEquals(2, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size());

// Resolve resource
String sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true).toUpperCase();

@ -2934,10 +2907,6 @@ public class PartitioningSqlR4Test extends BasePartitioningR4Test {
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(1).getSql(true, true).toUpperCase();
assertEquals(1, countMatches(sql, "PARTITION_ID IS NULL"));

// Resolve forced IDs
sql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(2).getSql(true, true).toUpperCase();
assertEquals(1, countMatches(sql, "FORCEDID0_.RESOURCE_PID IN"), sql);
assertEquals(0, countMatches(sql, "PARTITION_ID IS NULL"), sql);
}

@Test
@ -616,6 +616,7 @@ public class ResourceProviderHasParamR4Test extends BaseResourceProviderR4Test {

List<String> notInListQueries = new ArrayList<>();
for (String query : queries) {
ourLog.info("Query: {}", query);
if (query.contains("RES_ID NOT IN"))
notInListQueries.add(query);
}
@ -132,7 +132,9 @@ public class ServerCapabilityStatementProviderJpaR4Test extends BaseResourceProv
.stream()
.map(t -> t.getCode())
.collect(Collectors.toList());
assertThat(formats.toString(), formats, containsInAnyOrder(
assertThat(formats.toString(), formats, hasItems(
"application/x-turtle",
"ttl",
"application/fhir+xml",
"application/fhir+json",
"json",
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -149,13 +149,13 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-test-utilities</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-test-utilities</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<scope>test</scope>
</dependency>
</dependencies>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -55,13 +55,13 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-test-utilities</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-jpaserver-test-utilities</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<scope>test</scope>
</dependency>
<dependency>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -132,6 +132,12 @@
<artifactId>quartz</artifactId>
</dependency>

<!-- Caffeine -->
<dependency>
<groupId>com.github.ben-manes.caffeine</groupId>
<artifactId>caffeine</artifactId>
</dependency>

<!-- Java -->
<dependency>
<groupId>javax.annotation</groupId>
@ -28,6 +28,7 @@ import org.hl7.fhir.dstu2.model.Subscription;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.DateTimeType;

import javax.annotation.Nonnull;
import javax.annotation.PostConstruct;
import java.util.Arrays;
import java.util.Collections;

@ -382,6 +383,8 @@ public class ModelConfig {
return this;
}

/**
* This setting indicates which subscription channel types are supported by the server. Any subscriptions submitted
* to the server matching these types will be activated.
@ -227,6 +227,8 @@ public class JpaConstants {
* Parameter for the $expand operation
*/
public static final String OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY = "includeHierarchy";
public static final String HEADER_UPSERT_EXISTENCE_CHECK = "X-Upsert-Extistence-Check";
public static final String HEADER_UPSERT_EXISTENCE_CHECK_DISABLED = "disabled";

/**
* Non-instantiable
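Editor's note: the two constants above define a request header that disables the pre-write existence check for client-assigned-ID updates (the error-message change earlier in this commit documents the failure mode when the resource already exists). A hedged client-side sketch follows; the server base URL is an assumption, and the header name/value are copied verbatim from the constants, including their spelling.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Patient;

public class UpsertExistenceCheckClientSketch {

	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/fhir"); // assumed endpoint

		Patient p = new Patient();
		p.setId("Patient/AAA"); // client-assigned ID
		p.getMaritalStatus().setText("123");

		// Skips the SELECT that normally checks whether Patient/AAA already exists;
		// if it does exist, the write fails with a version-constraint error instead.
		client.update()
			.resource(p)
			.withAdditionalHeader("X-Upsert-Extistence-Check", "disabled")
			.execute();
	}
}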
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -23,9 +23,9 @@ package ca.uhn.fhir.jpa.searchparam;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.searchparam.util.JpaParamUtil;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.model.api.IQueryParameterAnd;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.api.Include;

@ -35,6 +35,7 @@ import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.ReflectionUtil;
import ca.uhn.fhir.util.UrlUtil;
import com.google.common.collect.ArrayListMultimap;

@ -52,6 +53,8 @@ public class MatchUrlService {
private FhirContext myContext;
@Autowired
private ISearchParamRegistry mySearchParamRegistry;
@Autowired
private ModelConfig myModelConfig;

public SearchParameterMap translateMatchUrl(String theMatchUrl, RuntimeResourceDefinition theResourceDefinition, Flag... theFlags) {
SearchParameterMap paramMap = new SearchParameterMap();
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@ -169,7 +169,7 @@
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-converter</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
</dependency>

</dependencies>

@ -7,7 +7,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
</parent>

<artifactId>hapi-fhir-spring-boot-sample-client-apache</artifactId>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
</parent>

<artifactId>hapi-fhir-spring-boot-sample-client-okhttp</artifactId>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot-samples</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
</parent>

<artifactId>hapi-fhir-spring-boot-sample-server-jersey</artifactId>
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-spring-boot</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
</parent>

<artifactId>hapi-fhir-spring-boot-samples</artifactId>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.4.0-PRE8-SNAPSHOT</version>
<version>5.4.0-PRE9-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>

@ -4,7 +4,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-deployable-pom</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
@ -58,37 +58,37 @@
|
|||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-structures-dstu3</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-structures-hl7org-dstu2</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-structures-r4</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-structures-r5</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-validation-resources-dstu2</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-validation-resources-dstu3</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir-validation-resources-r4</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.velocity</groupId>
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
2
pom.xml
2
pom.xml
|
@ -6,7 +6,7 @@
|
|||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<packaging>pom</packaging>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<name>HAPI-FHIR</name>
|
||||
<description>An open-source implementation of the FHIR specification in Java.</description>
|
||||
<url>https://hapifhir.io</url>
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -6,7 +6,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -4,7 +4,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|
|
@ -5,7 +5,7 @@
|
|||
<parent>
|
||||
<groupId>ca.uhn.hapi.fhir</groupId>
|
||||
<artifactId>hapi-fhir</artifactId>
|
||||
<version>5.4.0-PRE8-SNAPSHOT</version>
|
||||
<version>5.4.0-PRE9-SNAPSHOT</version>
|
||||
<relativePath>../../pom.xml</relativePath>
|
||||
</parent>
|
||||
|
||||
|
|