Don't merge yet: MegaScale API Changes 2 (#4544)
* Megascale changes 2
* Cleanup
* Cleanup
* Sync master
* Cleanup
* Ongoing work
* Move service
* Fixes
* Ongoing megascaler work
* Work
* Work
* Fixes
* Work
* Test fixes
* Work on remote services
* Interceptor rework
* Work on config refactor
* Fixes
* Docs tweaks
* Address review comments
* Test tweak
* Test fixes
* Try to fix tests
* Test tweaks
* Fix changelogs
* Work on merging
* Changes
* Megascale fixes
* Remove accidental commit
* Address fixmes
* Cleanup
* Test fixes
* Compile fix
* Test fixes
* Account for review comments
* Bump HAPI FHIR version
parent cedf69516b
commit bf495e2d92
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -628,6 +628,13 @@ public enum Pointcut implements IPointcut {
 	 * This method is called after all processing is completed for a request, but only if the
 	 * request completes normally (i.e. no exception is thrown).
+	 * <p>
+	 * This pointcut is called after the response has completely finished, meaning that the HTTP response to the client
+	 * may or may not have already completely been returned to the client by the time this pointcut is invoked. Use caution
+	 * if you have timing-dependent logic, since there is no guarantee about whether the client will have already moved on
+	 * by the time your method is invoked. If you need a guarantee that your method is invoked before returning to the
+	 * client, consider using {@link #SERVER_OUTGOING_RESPONSE} instead.
+	 * </p>
 	 * <p>
 	 * Hooks may accept the following parameters:
 	 * <ul>
 	 * <li>
@@ -1245,7 +1252,8 @@ public enum Pointcut implements IPointcut {
 	 * <ul>
 	 * <li>
 	 * ca.uhn.fhir.rest.server.util.ICachedSearchDetails - Contains the details of the search that
-	 * is being created and initialized
+	 * is being created and initialized. Interceptors may use this parameter to modify aspects of the search
+	 * before it is stored and executed.
 	 * </li>
 	 * <li>
 	 * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
@@ -1264,6 +1272,9 @@ public enum Pointcut implements IPointcut {
 	 * <li>
 	 * ca.uhn.fhir.jpa.searchparam.SearchParameterMap - Contains the details of the search being checked. This can be modified.
 	 * </li>
+	 * <li>
+	 * ca.uhn.fhir.interceptor.model.RequestPartitionId - The partition associated with the request (or {@literal null} if the server is not partitioned)
+	 * </li>
 	 * </ul>
 	 * <p>
 	 * Hooks should return <code>void</code>.
@@ -1273,7 +1284,8 @@ public enum Pointcut implements IPointcut {
 		"ca.uhn.fhir.rest.server.util.ICachedSearchDetails",
 		"ca.uhn.fhir.rest.api.server.RequestDetails",
 		"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
-		"ca.uhn.fhir.jpa.searchparam.SearchParameterMap"
+		"ca.uhn.fhir.jpa.searchparam.SearchParameterMap",
+		"ca.uhn.fhir.interceptor.model.RequestPartitionId"
 	),

 	/**
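A minimal hook sketch for illustration (not part of this commit): the pointcut name STORAGE_PRESEARCH_REGISTERED is inferred from the parameter list registered above, and the method body is invented.

	import ca.uhn.fhir.interceptor.api.Hook;
	import ca.uhn.fhir.interceptor.api.Interceptor;
	import ca.uhn.fhir.interceptor.api.Pointcut;
	import ca.uhn.fhir.interceptor.model.RequestPartitionId;
	import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
	import ca.uhn.fhir.rest.api.server.RequestDetails;
	import ca.uhn.fhir.rest.server.util.ICachedSearchDetails;

	@Interceptor
	public class PartitionAwareSearchInterceptor {

		// Receives the newly added RequestPartitionId parameter alongside the
		// modifiable SearchParameterMap described in the Javadoc above.
		@Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
		public void preSearchRegistered(ICachedSearchDetails theSearchDetails,
													RequestDetails theRequestDetails,
													SearchParameterMap theParams,
													RequestPartitionId theRequestPartitionId) {
			if (theRequestPartitionId != null && !theRequestPartitionId.isAllPartitions()) {
				// Hypothetical adjustment: load partitioned searches synchronously
				theParams.setLoadSynchronous(true);
			}
		}
	}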
|
@@ -1912,8 +1924,11 @@ public enum Pointcut implements IPointcut {

 	/**
 	 * <b>Storage Hook:</b>
-	 * Invoked before FHIR read/access operation (e.g. <b>read/vread</b>, <b>search</b>, <b>history</b>, etc.) operation to request the
-	 * identification of the partition ID to be associated with the resource(s) being searched for, read, etc.
+	 * Invoked before any FHIR read/access/extended operation (e.g. <b>read/vread</b>, <b>search</b>, <b>history</b>,
+	 * <b>$reindex</b>, etc.) operation to request the identification of the partition ID to be associated with
+	 * the resource(s) being searched for, read, etc. Essentially any operations in the JPA server that are not
+	 * creating a resource will use this pointcut. Creates will use {@link #STORAGE_PARTITION_IDENTIFY_CREATE}.
+	 *
 	 * <p>
 	 * This hook will only be called if
 	 * partitioning is enabled in the JPA server.
@@ -2193,6 +2208,30 @@ public enum Pointcut implements IPointcut {
 		"ca.uhn.fhir.rest.server.TransactionLogMessages",
 		"ca.uhn.fhir.mdm.api.MdmLinkEvent"),

+
+	/**
+	 * <b>JPA Hook:</b>
+	 * This hook is invoked when a cross-partition reference is about to be
+	 * stored in the database.
+	 * <p>
+	 * <b>This is an experimental API - It may change in the future, use with caution.</b>
+	 * </p>
+	 * <p>
+	 * Hooks may accept the following parameters:
+	 * </p>
+	 * <ul>
+	 * <li>
+	 * {@literal ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails} - Contains details about the
+	 * cross partition reference.
+	 * </li>
+	 * </ul>
+	 * <p>
+	 * Hooks should return <code>void</code>.
+	 * </p>
+	 */
+	JPA_RESOLVE_CROSS_PARTITION_REFERENCE("ca.uhn.fhir.jpa.model.cross.IResourceLookup",
+		"ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails"),
+
 	/**
 	 * <b>Performance Tracing Hook:</b>
 	 * This hook is invoked when any informational messages generated by the
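For illustration only (not from this commit): a hook for the new experimental pointcut. The parameter type is taken from the declaration above; note that the enum declares IResourceLookup as the hook return type, so this sketch follows the declaration rather than the "return void" Javadoc.

	import ca.uhn.fhir.interceptor.api.Hook;
	import ca.uhn.fhir.interceptor.api.Interceptor;
	import ca.uhn.fhir.interceptor.api.Pointcut;
	import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
	import ca.uhn.fhir.jpa.searchparam.extractor.CrossPartitionReferenceDetails;

	@Interceptor
	public class CrossPartitionReferenceInterceptor {

		@Hook(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)
		public IResourceLookup resolveCrossPartitionReference(CrossPartitionReferenceDetails theDetails) {
			// Application-specific resolution of the reference target would go
			// here; returning null is an invented placeholder.
			return null;
		}
	}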
@@ -137,12 +137,12 @@ public class RequestPartitionId implements IModelJson {

 		RequestPartitionId that = (RequestPartitionId) theO;

-		return new EqualsBuilder()
-			.append(myAllPartitions, that.myAllPartitions)
-			.append(myPartitionDate, that.myPartitionDate)
-			.append(myPartitionIds, that.myPartitionIds)
-			.append(myPartitionNames, that.myPartitionNames)
-			.isEquals();
+		EqualsBuilder b = new EqualsBuilder();
+		b.append(myAllPartitions, that.myAllPartitions);
+		b.append(myPartitionDate, that.myPartitionDate);
+		b.append(myPartitionIds, that.myPartitionIds);
+		b.append(myPartitionNames, that.myPartitionNames);
+		return b.isEquals();
 	}

 	@Override
@@ -307,8 +307,12 @@ public class Constants {
 	public static final String PARAMQUALIFIER_TOKEN_NOT_IN = ":not-in";
 	public static final String PARAMQUALIFIER_TOKEN_ABOVE = ":above";
 	public static final String PARAMQUALIFIER_TOKEN_BELOW = ":below";
+	/**
+	 * The number of characters in a UUID (36)
+	 */
+	public static final int UUID_LENGTH = 36;

 	static {
 		CHARSET_UTF8 = StandardCharsets.UTF_8;
 		CHARSET_US_ASCII = StandardCharsets.ISO_8859_1;
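As a quick sanity check of the new constant (hypothetical usage, not from this commit): the canonical string form of a java.util.UUID is always 36 characters.

	import java.util.UUID;

	String uuid = UUID.randomUUID().toString();
	assert uuid.length() == Constants.UUID_LENGTH; // 36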
@@ -4,14 +4,14 @@
 	<modelVersion>4.0.0</modelVersion>
 	<groupId>ca.uhn.hapi.fhir</groupId>
 	<artifactId>hapi-fhir-bom</artifactId>
-	<version>6.5.2-SNAPSHOT</version>
+	<version>6.5.3-SNAPSHOT</version>
 	<packaging>pom</packaging>
 	<name>HAPI FHIR BOM</name>

 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -105,8 +105,8 @@
 			<artifactId>mariadb-java-client</artifactId>
 		</dependency>
 		<dependency>
-			<groupId>mysql</groupId>
-			<artifactId>mysql-connector-java</artifactId>
+			<groupId>com.mysql</groupId>
+			<artifactId>mysql-connector-j</artifactId>
 		</dependency>
 		<dependency>
 			<groupId>org.postgresql</groupId>
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir-cli</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -6,7 +6,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../../hapi-deployable-pom</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -197,43 +197,49 @@ public class LoggingInterceptor implements IClientInterceptor {
 	/**
 	 * Should a summary (one line) for each request be logged, containing the URL and other information
 	 */
-	public void setLogRequestBody(boolean theValue) {
+	public LoggingInterceptor setLogRequestBody(boolean theValue) {
 		myLogRequestBody = theValue;
+		return this;
 	}

 	/**
 	 * Should headers for each request be logged, containing the URL and other information
 	 */
-	public void setLogRequestHeaders(boolean theValue) {
+	public LoggingInterceptor setLogRequestHeaders(boolean theValue) {
 		myLogRequestHeaders = theValue;
+		return this;
 	}

 	/**
 	 * Should a summary (one line) for each request be logged, containing the URL and other information
 	 */
-	public void setLogRequestSummary(boolean theValue) {
+	public LoggingInterceptor setLogRequestSummary(boolean theValue) {
 		myLogRequestSummary = theValue;
+		return this;
 	}

 	/**
 	 * Should a summary (one line) for each request be logged, containing the URL and other information
 	 */
-	public void setLogResponseBody(boolean theValue) {
+	public LoggingInterceptor setLogResponseBody(boolean theValue) {
 		myLogResponseBody = theValue;
+		return this;
 	}

 	/**
 	 * Should headers for each request be logged, containing the URL and other information
 	 */
-	public void setLogResponseHeaders(boolean theValue) {
+	public LoggingInterceptor setLogResponseHeaders(boolean theValue) {
 		myLogResponseHeaders = theValue;
+		return this;
 	}

 	/**
 	 * Should a summary (one line) for each request be logged, containing the URL and other information
 	 */
-	public void setLogResponseSummary(boolean theValue) {
+	public LoggingInterceptor setLogResponseSummary(boolean theValue) {
 		myLogResponseSummary = theValue;
+		return this;
 	}

 }
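Since every setter now returns this, client logging can be configured in a single chained expression (usage sketch; the client variable is assumed):

	import ca.uhn.fhir.rest.client.interceptor.LoggingInterceptor;

	LoggingInterceptor logging = new LoggingInterceptor()
		.setLogRequestSummary(true)
		.setLogRequestBody(true)
		.setLogResponseSummary(true)
		.setLogResponseHeaders(false);
	client.registerInterceptor(logging); // client: an assumed IGenericClient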
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-fhir</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -0,0 +1,6 @@
+---
+type: add
+issue: 4544
+title: "A new API has been added to the JPA server for storing resources which
+  are actually placeholders for externally stored payloads. This API is not yet
+  used for any published use cases but will be rolled out for future uses."
@@ -0,0 +1,7 @@
+---
+type: add
+issue: 4544
+title: "The HapiTransactionService (HAPI FHIR JPA Transaction Manager) has been improved
+  and is now used in a number of new places (replacing the @Transactional annotation). This
+  means that stack traces have much less transaction logic noise, and give a clear
+  indication of transaction boundaries."
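A sketch of the builder-style usage this changelog refers to, mirroring the DAO hunks later in this commit (the callback is taken from the create() hunk below):

	DaoMethodOutcome outcome = myTransactionService
		.withRequest(theRequestDetails)
		.withTransactionDetails(theTransactionDetails)
		.withRequestPartitionId(requestPartitionId)
		.execute(tx -> doCreateForPost(theResource, theIfNoneExist,
			thePerformIndexing, theTransactionDetails, theRequestDetails, requestPartitionId));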
@@ -30,7 +30,8 @@ Creating your own interceptors is easy. Custom interceptor classes do not need t

 * The method must have an appropriate return value for the chosen [Pointcut](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html).

 * The method may have any of the parameters specified for the given [Pointcut](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html).
+    * A parameter of type [Pointcut](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html) may also be added, and will be passed the actual Pointcut which triggered the invocation.

 * The method must be public.
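A minimal interceptor sketch showing the Pointcut parameter described above (the pointcut chosen here is an arbitrary example, not mandated by this change):

	import ca.uhn.fhir.interceptor.api.Hook;
	import ca.uhn.fhir.interceptor.api.Interceptor;
	import ca.uhn.fhir.interceptor.api.Pointcut;
	import ca.uhn.fhir.rest.api.server.RequestDetails;

	@Interceptor
	public class AuditInterceptor {

		// The trailing Pointcut parameter is populated with the pointcut that
		// actually triggered this invocation.
		@Hook(Pointcut.SERVER_INCOMING_REQUEST_POST_PROCESSED)
		public void audit(RequestDetails theRequestDetails, Pointcut thePointcut) {
			System.out.println("Invoked for pointcut: " + thePointcut);
		}
	}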
@@ -11,7 +11,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -4,7 +4,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
 	<modelVersion>4.0.0</modelVersion>
@@ -5,7 +5,7 @@
 	<parent>
 		<groupId>ca.uhn.hapi.fhir</groupId>
 		<artifactId>hapi-deployable-pom</artifactId>
-		<version>6.5.2-SNAPSHOT</version>
+		<version>6.5.3-SNAPSHOT</version>
 		<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
 	</parent>
@@ -8,6 +8,7 @@ import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
 import ca.uhn.fhir.interceptor.api.IInterceptorService;
 import ca.uhn.fhir.interceptor.executor.InterceptorService;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
@@ -53,6 +54,7 @@ import ca.uhn.fhir.jpa.delete.DeleteConflictService;
 import ca.uhn.fhir.jpa.delete.ThreadSafeResourceDeleterSvc;
 import ca.uhn.fhir.jpa.entity.MdmLink;
 import ca.uhn.fhir.jpa.entity.Search;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
 import ca.uhn.fhir.jpa.graphql.DaoRegistryGraphQLStorageServices;
 import ca.uhn.fhir.jpa.interceptor.CascadingDeleteInterceptor;
 import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
@@ -133,6 +135,7 @@ import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
 import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
 import ca.uhn.fhir.jpa.term.api.ITermReindexingSvc;
 import ca.uhn.fhir.jpa.term.config.TermCodeSystemConfig;
+import ca.uhn.fhir.jpa.util.JpaHapiTransactionService;
 import ca.uhn.fhir.jpa.util.MemoryCacheService;
 import ca.uhn.fhir.jpa.validation.ResourceLoaderImpl;
 import ca.uhn.fhir.jpa.validation.ValidationSettings;
@@ -227,6 +230,11 @@ public class JpaConfig {
 		return new CascadingDeleteInterceptor(theFhirContext, theDaoRegistry, theInterceptorBroadcaster, threadSafeResourceDeleterSvc);
 	}

+	@Bean
+	public ExternallyStoredResourceServiceRegistry ExternallyStoredResourceServiceRegistry() {
+		return new ExternallyStoredResourceServiceRegistry();
+	}
+
 	@Lazy
 	@Bean
 	public ThreadSafeResourceDeleterSvc safeDeleter(DaoRegistry theDaoRegistry, IInterceptorBroadcaster theInterceptorBroadcaster, HapiTransactionService hapiTransactionService) {
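A hedged wiring sketch for the new registry (not from this commit: the provider variable and the registerProvider method are assumptions; only the bean above and hasProviders() appear in this diff):

	@Autowired
	private ExternallyStoredResourceServiceRegistry myEsrRegistry;

	public void wireEsrProvider() {
		// myProvider is a hypothetical implementation of the ESR provider contract
		myEsrRegistry.registerProvider(myProvider); // assumed registration API
	}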
|
@@ -383,7 +391,7 @@ public class JpaConfig {

 	@Bean
 	public HapiTransactionService hapiTransactionService() {
-		return new HapiTransactionService();
+		return new JpaHapiTransactionService();
 	}

 	@Bean
@@ -524,8 +532,8 @@ public class JpaConfig {

 	@Bean(name = PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER)
 	@Scope("prototype")
-	public PersistedJpaSearchFirstPageBundleProvider newPersistedJpaSearchFirstPageBundleProvider(RequestDetails theRequest, Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder) {
-		return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest);
+	public PersistedJpaSearchFirstPageBundleProvider newPersistedJpaSearchFirstPageBundleProvider(RequestDetails theRequest, Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) {
+		return new PersistedJpaSearchFirstPageBundleProvider(theSearch, theSearchTask, theSearchBuilder, theRequest, theRequestPartitionId);
 	}

 	@Bean(name = RepositoryValidatingRuleBuilder.REPOSITORY_VALIDATING_RULE_BUILDER)
@@ -30,6 +30,9 @@ import ca.uhn.fhir.jpa.dao.index.SearchParamWithInlineReferencesExtractor;
 import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
 import ca.uhn.fhir.jpa.delete.DeleteConflictService;
 import ca.uhn.fhir.jpa.entity.PartitionEntity;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddress;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceAddressMetadataKey;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
 import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
@@ -221,8 +224,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	@Autowired
 	protected InMemoryResourceMatcher myInMemoryResourceMatcher;
+	@Autowired
+	protected IJpaStorageResourceParser myJpaStorageResourceParser;
+	@Autowired
+	protected PartitionSettings myPartitionSettings;
 	@Autowired
 	ExpungeService myExpungeService;
+	@Autowired
+	private ExternallyStoredResourceServiceRegistry myExternallyStoredResourceServiceRegistry;
 	@Autowired
 	private ISearchParamPresenceSvc mySearchParamPresenceSvc;
 	@Autowired
 	private SearchParamWithInlineReferencesExtractor mySearchParamWithInlineReferencesExtractor;
@@ -231,18 +240,18 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	private FhirContext myContext;
 	private ApplicationContext myApplicationContext;
-	@Autowired
-	private PartitionSettings myPartitionSettings;
 	@Autowired
 	private IPartitionLookupSvc myPartitionLookupSvc;
 	@Autowired
 	private MemoryCacheService myMemoryCacheService;
 	@Autowired(required = false)
 	private IFulltextSearchSvc myFulltextSearchSvc;

 	@Autowired
 	private PlatformTransactionManager myTransactionManager;
-	@Autowired
-	protected IJpaStorageResourceParser myJpaStorageResourceParser;

+	@VisibleForTesting
+	public void setExternallyStoredResourceServiceRegistryForUnitTest(ExternallyStoredResourceServiceRegistry theExternallyStoredResourceServiceRegistry) {
+		myExternallyStoredResourceServiceRegistry = theExternallyStoredResourceServiceRegistry;
+	}
+
 	@VisibleForTesting
 	public void setSearchParamPresenceSvc(ISearchParamPresenceSvc theSearchParamPresenceSvc) {
@@ -544,42 +553,58 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

 		if (thePerformIndexing) {

-			encoding = myStorageSettings.getResourceEncoding();
-
-			String resourceType = theEntity.getResourceType();
-
-			List<String> excludeElements = new ArrayList<>(8);
-			IBaseMetaType meta = theResource.getMeta();
-
-			IBaseExtension<?, ?> sourceExtension = getExcludedElements(resourceType, excludeElements, meta);
-
-			theEntity.setFhirVersion(myContext.getVersion().getVersion());
-
-			HashFunction sha256 = Hashing.sha256();
-			HashCode hashCode;
-			String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext);
-			if (getStorageSettings().getInlineResourceTextBelowSize() > 0 && encodedResource.length() < getStorageSettings().getInlineResourceTextBelowSize()) {
-				resourceText = encodedResource;
-				resourceBinary = null;
-				encoding = ResourceEncodingEnum.JSON;
-				hashCode = sha256.hashUnencodedChars(encodedResource);
-			} else {
-				resourceText = null;
-				resourceBinary = getResourceBinary(encoding, encodedResource);
-				hashCode = sha256.hashBytes(resourceBinary);
-			}
-
-			String hashSha256 = hashCode.toString();
-			if (hashSha256.equals(theEntity.getHashSha256()) == false) {
-				changed = true;
-			}
-			theEntity.setHashSha256(hashSha256);
-
-			if (sourceExtension != null) {
-				IBaseExtension<?, ?> newSourceExtension = ((IBaseHasExtensions) meta).addExtension();
-				newSourceExtension.setUrl(sourceExtension.getUrl());
-				newSourceExtension.setValue(sourceExtension.getValue());
-			}
+			ExternallyStoredResourceAddress address = null;
+			if (myExternallyStoredResourceServiceRegistry.hasProviders()) {
+				address = ExternallyStoredResourceAddressMetadataKey.INSTANCE.get(theResource);
+			}
+
+			if (address != null) {
+
+				encoding = ResourceEncodingEnum.ESR;
+				resourceBinary = null;
+				resourceText = address.getProviderId() + ":" + address.getLocation();
+				changed = true;
+
+			} else {
+
+				encoding = myStorageSettings.getResourceEncoding();
+
+				String resourceType = theEntity.getResourceType();
+
+				List<String> excludeElements = new ArrayList<>(8);
+				IBaseMetaType meta = theResource.getMeta();
+
+				IBaseExtension<?, ?> sourceExtension = getExcludedElements(resourceType, excludeElements, meta);
+
+				theEntity.setFhirVersion(myContext.getVersion().getVersion());
+
+				HashFunction sha256 = Hashing.sha256();
+				HashCode hashCode;
+				String encodedResource = encodeResource(theResource, encoding, excludeElements, myContext);
+				if (myStorageSettings.getInlineResourceTextBelowSize() > 0 && encodedResource.length() < myStorageSettings.getInlineResourceTextBelowSize()) {
+					resourceText = encodedResource;
+					resourceBinary = null;
+					encoding = ResourceEncodingEnum.JSON;
+					hashCode = sha256.hashUnencodedChars(encodedResource);
+				} else {
+					resourceText = null;
+					resourceBinary = getResourceBinary(encoding, encodedResource);
+					hashCode = sha256.hashBytes(resourceBinary);
+				}
+
+				String hashSha256 = hashCode.toString();
+				if (hashSha256.equals(theEntity.getHashSha256()) == false) {
+					changed = true;
+				}
+				theEntity.setHashSha256(hashSha256);
+
+				if (sourceExtension != null) {
+					IBaseExtension<?, ?> newSourceExtension = ((IBaseHasExtensions) meta).addExtension();
+					newSourceExtension.setUrl(sourceExtension.getUrl());
+					newSourceExtension.setValue(sourceExtension.getValue());
+				}
+
+			}

 		} else {
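For orientation (illustration, not from this commit): when an ESR address is attached, no resource body is persisted; the text column instead holds a provider-qualified pointer built as providerId + ":" + location by the added branch above.

	// Hypothetical stored value for an externally stored Patient payload
	String storedText = "my-blob-provider" + ":" + "payloads/Patient/123.json";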
|
@@ -662,6 +687,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 				break;
 			default:
 			case DEL:
+			case ESR:
 				resourceBinary = new byte[0];
 				break;
 		}
@@ -700,7 +726,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 			}
 		}
 		boolean allExtensionsRemoved = extensions.isEmpty();
-		if(allExtensionsRemoved){
+		if (allExtensionsRemoved) {
 			hasExtensions = false;
 		}
 	}
@@ -866,8 +892,6 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 	}

-
-
 	String toResourceName(IBaseResource theResource) {
 		return myContext.getResourceType(theResource);
 	}
@@ -969,9 +993,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		List<Long> pids = existingParams
 			.getResourceLinks()
 			.stream()
-			.map(t->t.getId())
+			.map(t -> t.getId())
 			.collect(Collectors.toList());
-		new QueryChunker<Long>().chunk(pids, t->{
+		new QueryChunker<Long>().chunk(pids, t -> {
 			List<ResourceLink> targets = myResourceLinkDao.findByPidAndFetchTargetDetails(t);
 			ourLog.trace("Prefetched targets: {}", targets);
 		});
@@ -1579,6 +1603,14 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		myPartitionSettings = thePartitionSettings;
 	}

+	/**
+	 * Do not call this method outside of unit tests
+	 */
+	@VisibleForTesting
+	public void setJpaStorageResourceParserForUnitTest(IJpaStorageResourceParser theJpaStorageResourceParser) {
+		myJpaStorageResourceParser = theJpaStorageResourceParser;
+	}
+
 	private class AddTagDefinitionToCacheAfterCommitSynchronization implements TransactionSynchronization {

 		private final TagDefinition myTagDefinition;
@@ -1629,6 +1661,7 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 				resourceText = GZipUtil.decompress(theResourceBytes);
 				break;
 			case DEL:
+			case ESR:
 				break;
 		}
 		return resourceText;
@@ -1688,12 +1721,4 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
 		ourValidationDisabledForUnitTest = theValidationDisabledForUnitTest;
 	}

-	/**
-	 * Do not call this method outside of unit tests
-	 */
-	@VisibleForTesting
-	public void setJpaStorageResourceParserForUnitTest(IJpaStorageResourceParser theJpaStorageResourceParser) {
-		myJpaStorageResourceParser = theJpaStorageResourceParser;
-	}
-
 }
@@ -50,6 +50,7 @@ import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
 import ca.uhn.fhir.jpa.model.entity.BaseTag;
 import ca.uhn.fhir.jpa.model.entity.ForcedId;
+import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.entity.TagDefinition;
@@ -99,6 +100,7 @@ import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
 import ca.uhn.fhir.rest.server.exceptions.ResourceVersionConflictException;
 import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
+import ca.uhn.fhir.rest.server.provider.ProviderConstants;
 import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
 import ca.uhn.fhir.util.ObjectUtil;
@@ -147,6 +149,7 @@ import java.util.List;
 import java.util.Optional;
 import java.util.Set;
 import java.util.UUID;
+import java.util.concurrent.Callable;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
@@ -242,7 +245,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

 	@Override
 	public DaoMethodOutcome create(T theResource, String theIfNoneExist, boolean thePerformIndexing, @Nonnull TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
-		return myTransactionService.execute(theRequestDetails, theTransactionDetails, tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails));
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName());
+		return myTransactionService
+			.withRequest(theRequestDetails)
+			.withTransactionDetails(theTransactionDetails)
+			.withRequestPartitionId(requestPartitionId)
+			.execute(tx -> doCreateForPost(theResource, theIfNoneExist, thePerformIndexing, theTransactionDetails, theRequestDetails, requestPartitionId));
 	}

 	@VisibleForTesting
@@ -253,7 +261,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	/**
 	 * Called for FHIR create (POST) operations
 	 */
-	protected DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails) {
+	protected DaoMethodOutcome doCreateForPost(T theResource, String theIfNoneExist, boolean thePerformIndexing, TransactionDetails theTransactionDetails, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
 		if (theResource == null) {
 			String msg = getContext().getLocalizer().getMessage(BaseStorageDao.class, "missingBody");
 			throw new InvalidRequestException(Msg.code(956) + msg);
@@ -274,13 +282,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 			theResource.setUserData(JpaConstants.RESOURCE_ID_SERVER_ASSIGNED, Boolean.TRUE);
 		}

-		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequestDetails, theResource, getResourceName());
-		return doCreateForPostOrPut(theRequestDetails, theResource, theIfNoneExist, true, thePerformIndexing, requestPartitionId, RestOperationTypeEnum.CREATE, theTransactionDetails);
+		return doCreateForPostOrPut(theRequestDetails, theResource, theIfNoneExist, true, thePerformIndexing, theRequestPartitionId, RestOperationTypeEnum.CREATE, theTransactionDetails);
 	}

 	/**
-	 * Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails)}
-	 * as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails)}.
+	 * Called both for FHIR create (POST) operations (via {@link #doCreateForPost(IBaseResource, String, boolean, TransactionDetails, RequestDetails, RequestPartitionId)}
+	 * as well as for FHIR update (PUT) where we're doing a create-with-client-assigned-ID (via {@link #doUpdate(IBaseResource, String, boolean, boolean, RequestDetails, TransactionDetails, RequestPartitionId)}.
 	 */
 	private DaoMethodOutcome doCreateForPostOrPut(RequestDetails theRequest, T theResource, String theMatchUrl, boolean theProcessMatchUrl, boolean thePerformIndexing, RequestPartitionId theRequestPartitionId, RestOperationTypeEnum theOperationType, TransactionDetails theTransactionDetails) {
 		StopWatch w = new StopWatch();
@@ -290,7 +297,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

 		ResourceTable entity = new ResourceTable();
 		entity.setResourceType(toResourceName(theResource));
-		entity.setPartitionId(myRequestPartitionHelperService.toStoragePartition(theRequestPartitionId));
+		entity.setPartitionId(PartitionablePartitionId.toStoragePartition(theRequestPartitionId, myPartitionSettings));
 		entity.setCreatedByMatchUrl(theMatchUrl);
 		entity.setVersion(1);
@@ -895,10 +902,16 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}

 	@Override
-	@Transactional
 	public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
 		StopWatch w = new StopWatch();
-		IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset);
+		ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, null);
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details);
+		IBundleProvider retVal = myTransactionService
+			.withRequest(theRequestDetails)
+			.withRequestPartitionId(requestPartitionId)
+			.execute(() -> {
+				return myPersistedJpaBundleProviderFactory.history(theRequestDetails, myResourceName, null, theSince, theUntil, theOffset, requestPartitionId);
+			});
 		ourLog.debug("Processed history on {} in {}ms", myResourceName, w.getMillisAndRestart());
 		return retVal;
 	}
@@ -907,35 +920,48 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	 * @deprecated Use {@link #history(IIdType, HistorySearchDateRangeParam, RequestDetails)} instead
 	 */
 	@Override
-	@Transactional
 	public IBundleProvider history(final IIdType theId, final Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequest) {
 		StopWatch w = new StopWatch();

-		IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
-		BaseHasResource entity = readEntity(id, theRequest);
-
-		IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset);
-
-		ourLog.debug("Processed history on {} in {}ms", id, w.getMillisAndRestart());
+		ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId);
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details);
+		IBundleProvider retVal = myTransactionService
+			.withRequest(theRequest)
+			.withRequestPartitionId(requestPartitionId)
+			.execute(() -> {
+				IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
+				BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId);
+
+				return myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(), theSince, theUntil, theOffset, requestPartitionId);
+			});
+
+		ourLog.debug("Processed history on {} in {}ms", theId, w.getMillisAndRestart());
 		return retVal;
 	}

 	@Override
-	@Transactional
 	public IBundleProvider history(final IIdType theId, final HistorySearchDateRangeParam theHistorySearchDateRangeParam,
 											 RequestDetails theRequest) {
 		StopWatch w = new StopWatch();
-
-		IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
-		BaseHasResource entity = readEntity(id, theRequest);
-
-		IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(),
-			theHistorySearchDateRangeParam.getLowerBoundAsInstant(),
-			theHistorySearchDateRangeParam.getUpperBoundAsInstant(),
-			theHistorySearchDateRangeParam.getOffset(),
-			theHistorySearchDateRangeParam.getHistorySearchType());
-
-		ourLog.debug("Processed history on {} in {}ms", id, w.getMillisAndRestart());
+		ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(myResourceName, theId);
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details);
+		IBundleProvider retVal = myTransactionService
+			.withRequest(theRequest)
+			.withRequestPartitionId(requestPartitionId)
+			.execute(() -> {
+				IIdType id = theId.withResourceType(myResourceName).toUnqualifiedVersionless();
+				BaseHasResource entity = readEntity(id, true, theRequest, requestPartitionId);
+
+				return myPersistedJpaBundleProviderFactory.history(theRequest, myResourceName, entity.getId(),
+					theHistorySearchDateRangeParam.getLowerBoundAsInstant(),
+					theHistorySearchDateRangeParam.getUpperBoundAsInstant(),
+					theHistorySearchDateRangeParam.getOffset(),
+					theHistorySearchDateRangeParam.getHistorySearchType(),
+					requestPartitionId
+				);
+			});
+
+		ourLog.debug("Processed history on {} in {}ms", theId, w.getMillisAndRestart());
 		return retVal;
 	}
@@ -962,8 +988,8 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 			addAllResourcesTypesToReindex(theBase, theRequestDetails, params);
 		}

-		ReadPartitionIdRequestDetails details = new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null);
-		RequestPartitionId requestPartition = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, null, details);
+		ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_REINDEX);
+		RequestPartitionId requestPartition = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details);
 		params.setRequestPartitionId(requestPartition);

 		JobInstanceStartRequest request = new JobInstanceStartRequest();
@@ -1160,14 +1186,20 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		validateResourceTypeAndThrowInvalidRequestException(theId);
 		TransactionDetails transactionDetails = new TransactionDetails();

-		return myTransactionService.execute(theRequest, transactionDetails, tx -> doRead(theId, theRequest, theDeletedOk));
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, myResourceName, theId);
+
+		return myTransactionService
+			.withRequest(theRequest)
+			.withTransactionDetails(transactionDetails)
+			.withRequestPartitionId(requestPartitionId)
+			.execute(() -> doReadInTransaction(theId, theRequest, theDeletedOk, requestPartitionId));
 	}

-	public T doRead(IIdType theId, RequestDetails theRequest, boolean theDeletedOk) {
+	private T doReadInTransaction(IIdType theId, RequestDetails theRequest, boolean theDeletedOk, RequestPartitionId theRequestPartitionId) {
 		assert TransactionSynchronizationManager.isActualTransactionActive();

 		StopWatch w = new StopWatch();
-		BaseHasResource entity = readEntity(theId, theRequest);
+		BaseHasResource entity = readEntity(theId, true, theRequest, theRequestPartitionId);
 		validateResourceType(entity);

 		T retVal = myJpaStorageResourceParser.toResource(myResourceType, entity, null, false);
@@ -1214,9 +1246,12 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}

 	@Override
-	@Transactional
 	public BaseHasResource readEntity(IIdType theId, RequestDetails theRequest) {
-		return readEntity(theId, true, theRequest);
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, myResourceName, theId);
+		return myTransactionService
+			.withRequest(theRequest)
+			.withRequestPartitionId(requestPartitionId)
+			.execute(() -> readEntity(theId, true, theRequest, requestPartitionId));
 	}

 	@SuppressWarnings("unchecked")
@@ -1245,13 +1280,9 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		}
 	}

-	@Override
-	@Transactional
-	public BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest) {
+	private BaseHasResource readEntity(IIdType theId, boolean theCheckForForcedId, RequestDetails theRequest, RequestPartitionId requestPartitionId) {
 		validateResourceTypeAndThrowInvalidRequestException(theId);

-		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForRead(theRequest, getResourceName(), theId);
-
 		BaseHasResource entity;
 		JpaPid pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, getResourceName(), theId.getIdPart());
 		Set<Integer> readPartitions = null;
@@ -1361,15 +1392,16 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	}

 	@Override
-	public void reindex(T theResource, ResourceTable theEntity) {
+	public void reindex(T theResource, IBasePersistedResource theEntity) {
 		assert TransactionSynchronizationManager.isActualTransactionActive();

-		ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getId());
+		ourLog.debug("Indexing resource {} - PID {}", theEntity.getIdDt().getValue(), theEntity.getPersistentId());
 		if (theResource != null) {
 			CURRENTLY_REINDEXING.put(theResource, Boolean.TRUE);
 		}

-		TransactionDetails transactionDetails = new TransactionDetails(theEntity.getUpdatedDate());
+		ResourceTable entity = (ResourceTable) theEntity;
+		TransactionDetails transactionDetails = new TransactionDetails(entity.getUpdatedDate());
 		ResourceTable resourceTable = updateEntity(null, theResource, theEntity, theEntity.getDeleted(), true, false, transactionDetails, true, false);
 		if (theResource != null) {
 			CURRENTLY_REINDEXING.put(theResource, null);
@@ -1450,6 +1482,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

 		if (retVal instanceof PersistedJpaBundleProvider) {
 			PersistedJpaBundleProvider provider = (PersistedJpaBundleProvider) retVal;
+			provider.setRequestPartitionId(requestPartitionId);
 			if (provider.getCacheStatus() == SearchCacheStatusEnum.HIT) {
 				if (theServletResponse != null && theRequest != null) {
 					String value = "HIT from " + theRequest.getFhirServerBase();
@@ -1520,33 +1553,37 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	@Override
 	public List<JpaPid> searchForIds(SearchParameterMap theParams, RequestDetails theRequest, @Nullable IBaseResource theConditionalOperationTargetOrNull) {
 		TransactionDetails transactionDetails = new TransactionDetails();
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequest, myResourceName, theParams, theConditionalOperationTargetOrNull);

-		return myTransactionService.execute(theRequest, transactionDetails, tx -> {
-
-			if (theParams.getLoadSynchronousUpTo() != null) {
-				theParams.setLoadSynchronousUpTo(Math.min(getStorageSettings().getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo()));
-			} else {
-				theParams.setLoadSynchronousUpTo(getStorageSettings().getInternalSynchronousSearchSize());
-			}
-
-			ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
-
-			List<JpaPid> ids = new ArrayList<>();
-
-			String uuid = UUID.randomUUID().toString();
-			RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequest, getResourceName(), theParams, theConditionalOperationTargetOrNull);
-
-			SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequest, uuid);
-			try (IResultIterator<JpaPid> iter = builder.createQuery(theParams, searchRuntimeDetails, theRequest, requestPartitionId)) {
-				while (iter.hasNext()) {
-					ids.add(iter.next());
-				}
-			} catch (IOException e) {
-				ourLog.error("IO failure during database access", e);
-			}
-
-			return ids;
-		});
+		return myTransactionService
+			.withRequest(theRequest)
+			.withTransactionDetails(transactionDetails)
+			.withRequestPartitionId(requestPartitionId)
+			.execute(() -> {
+
+				if (theParams.getLoadSynchronousUpTo() != null) {
+					theParams.setLoadSynchronousUpTo(Math.min(myStorageSettings.getInternalSynchronousSearchSize(), theParams.getLoadSynchronousUpTo()));
+				} else {
+					theParams.setLoadSynchronousUpTo(myStorageSettings.getInternalSynchronousSearchSize());
+				}
+
+				ISearchBuilder builder = mySearchBuilderFactory.newSearchBuilder(this, getResourceName(), getResourceType());
+
+				List<JpaPid> ids = new ArrayList<>();
+
+				String uuid = UUID.randomUUID().toString();
+
+				SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequest, uuid);
+				try (IResultIterator<JpaPid> iter = builder.createQuery(theParams, searchRuntimeDetails, theRequest, requestPartitionId)) {
+					while (iter.hasNext()) {
+						ids.add(iter.next());
+					}
+				} catch (IOException e) {
+					ourLog.error("IO failure during database access", e);
+				}
+
+				return ids;
+			});
 	}

 	protected <MT extends IBaseMetaType> MT toMetaDt(Class<MT> theType, Collection<TagDefinition> tagDefinitions) {
@@ -1634,15 +1671,25 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		String id = theResource.getIdElement().getValue();
 		Runnable onRollback = () -> theResource.getIdElement().setValue(id);

-		// Execute the update in a retryable transaction
+		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName());
+
+		Callable<DaoMethodOutcome> updateCallback;
 		if (myStorageSettings.isUpdateWithHistoryRewriteEnabled() && theRequest != null && theRequest.isRewriteHistory()) {
-			return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails), onRollback);
+			updateCallback = () -> doUpdateWithHistoryRewrite(theResource, theRequest, theTransactionDetails, requestPartitionId);
 		} else {
-			return myTransactionService.execute(theRequest, theTransactionDetails, tx -> doUpdate(theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theRequest, theTransactionDetails), onRollback);
+			updateCallback = () -> doUpdate(theResource, theMatchUrl, thePerformIndexing, theForceUpdateVersion, theRequest, theTransactionDetails, requestPartitionId);
 		}

+		// Execute the update in a retryable transaction
+		return myTransactionService
+			.withRequest(theRequest)
+			.withTransactionDetails(theTransactionDetails)
+			.withRequestPartitionId(requestPartitionId)
+			.onRollback(onRollback)
+			.execute(updateCallback);
 	}

-	private DaoMethodOutcome doUpdate(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
+	private DaoMethodOutcome doUpdate(T theResource, String theMatchUrl, boolean thePerformIndexing, boolean theForceUpdateVersion, RequestDetails theRequest, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId) {
 		T resource = theResource;

 		preProcessResourceForStorage(resource);
@@ -1662,8 +1709,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 			entity = myEntityManager.find(ResourceTable.class, pid.getId());
 			resourceId = entity.getIdDt();
 		} else {
-			RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName());
-			DaoMethodOutcome outcome = doCreateForPostOrPut(theRequest, resource, theMatchUrl, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails);
+			DaoMethodOutcome outcome = doCreateForPostOrPut(theRequest, resource, theMatchUrl, false, thePerformIndexing, theRequestPartitionId, update, theTransactionDetails);

 			// Pre-cache the match URL
 			if (outcome.getPersistentId() != null) {
@@ -1682,8 +1728,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		assert resourceId != null;
 		assert resourceId.hasIdPart();

-		RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineCreatePartitionForRequest(theRequest, theResource, getResourceName());
-
 		boolean create = false;

 		if (theRequest != null) {
@@ -1695,14 +1739,14 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B

 		if (!create) {
 			try {
-				entity = readEntityLatestVersion(resourceId, requestPartitionId, theTransactionDetails);
+				entity = readEntityLatestVersion(resourceId, theRequestPartitionId, theTransactionDetails);
 			} catch (ResourceNotFoundException e) {
 				create = true;
 			}
 		}

 		if (create) {
-			return doCreateForPostOrPut(theRequest, resource, null, false, thePerformIndexing, requestPartitionId, update, theTransactionDetails);
+			return doCreateForPostOrPut(theRequest, resource, null, false, thePerformIndexing, theRequestPartitionId, update, theTransactionDetails);
 		}
 	}
@@ -1720,7 +1764,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 	 * @param theTransactionDetails details of the transaction
 	 * @return the outcome of the operation
 	 */
-	private DaoMethodOutcome doUpdateWithHistoryRewrite(T theResource, RequestDetails theRequest, TransactionDetails theTransactionDetails) {
+	private DaoMethodOutcome doUpdateWithHistoryRewrite(T theResource, RequestDetails theRequest, TransactionDetails theTransactionDetails, RequestPartitionId theRequestPartitionId) {
 		StopWatch w = new StopWatch();

 		// No need for indexing as this will update a non-current version of the resource which will not be searchable
@@ -1736,7 +1780,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
 		assert resourceId.hasIdPart();

 		try {
-			currentEntity = readEntityLatestVersion(resourceId.toVersionless(), theRequest, theTransactionDetails);
+			currentEntity = readEntityLatestVersion(resourceId.toVersionless(), theRequestPartitionId, theTransactionDetails);

 			if (!resourceId.hasVersionIdPart()) {
 				throw new InvalidRequestException(Msg.code(2093) + "Invalid resource ID, ID must contain a history version");
@@ -3,6 +3,8 @@ package ca.uhn.fhir.jpa.dao;
 import ca.uhn.fhir.context.FhirContext;
 import ca.uhn.fhir.i18n.Msg;
 import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
 import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
 import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
 import ca.uhn.fhir.jpa.api.model.ExpungeOptions;
@@ -10,9 +12,12 @@ import ca.uhn.fhir.jpa.api.model.ExpungeOutcome;
 import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
 import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
 import ca.uhn.fhir.jpa.dao.expunge.ExpungeService;
+import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
+import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
 import ca.uhn.fhir.jpa.model.dao.JpaPid;
 import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
 import ca.uhn.fhir.jpa.search.PersistedJpaBundleProviderFactory;
 import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
 import ca.uhn.fhir.jpa.util.QueryChunker;
@@ -88,6 +93,10 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 	private IResourceTagDao myResourceTagDao;
 	@Autowired
 	private IInterceptorBroadcaster myInterceptorBroadcaster;
+	@Autowired
+	private IRequestPartitionHelperSvc myRequestPartitionHelperService;
+	@Autowired
+	private IHapiTransactionService myTransactionService;

 	@VisibleForTesting
 	public void setTransactionProcessorForUnitTest(TransactionProcessor theTransactionProcessor) {
@@ -124,7 +133,6 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
 		return retVal;
 	}

-	@Transactional(propagation = Propagation.SUPPORTS)
 	@Nullable
 	@Override
 	public Map<String, Long> getResourceCountsFromCache() {
@ -138,32 +146,37 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
|
|||
@Override
|
||||
public IBundleProvider history(Date theSince, Date theUntil, Integer theOffset, RequestDetails theRequestDetails) {
|
||||
StopWatch w = new StopWatch();
|
||||
IBundleProvider retVal = myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset);
|
||||
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(null, null);
|
||||
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, details);
|
||||
IBundleProvider retVal = myTransactionService
|
||||
.withRequest(theRequestDetails)
|
||||
.withRequestPartitionId(requestPartitionId)
|
||||
.execute(() -> myPersistedJpaBundleProviderFactory.history(theRequestDetails, null, null, theSince, theUntil, theOffset, requestPartitionId));
|
||||
ourLog.info("Processed global history in {}ms", w.getMillisAndRestart());
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.NEVER)
|
||||
public T transaction(RequestDetails theRequestDetails, T theRequest) {
|
||||
HapiTransactionService.noTransactionAllowed();
|
||||
return myTransactionProcessor.transaction(theRequestDetails, theRequest, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.MANDATORY)
|
||||
public T transactionNested(RequestDetails theRequestDetails, T theRequest) {
|
||||
HapiTransactionService.requireTransaction();
|
||||
return myTransactionProcessor.transaction(theRequestDetails, theRequest, true);
|
||||
}
|
||||
|
||||
@Override
|
||||
@Transactional(propagation = Propagation.MANDATORY)
|
||||
public <P extends IResourcePersistentId> void preFetchResources(List<P> theResolvedIds) {
|
||||
HapiTransactionService.requireTransaction();
|
||||
List<Long> pids = theResolvedIds
|
||||
.stream()
|
||||
.map(t -> ((JpaPid) t).getId())
|
||||
.collect(Collectors.toList());
|
||||
|
||||
new QueryChunker<Long>().chunk(pids, ids->{
|
||||
new QueryChunker<Long>().chunk(pids, ids -> {
|
||||
|
||||
/*
|
||||
* Pre-fetch the resources we're touching in this transaction in mass - this reduced the
|
||||
|
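The partition-pinned transaction template in the history() change above recurs throughout this change set: resolve a RequestPartitionId up front, then run the work through IHapiTransactionService so it executes against the right partition. A minimal sketch of how a caller adopts the pattern, assuming the same field wiring as BaseHapiFhirSystemDao; the count query is a hypothetical stand-in for real work:

import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.springframework.beans.factory.annotation.Autowired;

public class PartitionAwareCountExample {

    @Autowired
    private IHapiTransactionService myTransactionService;
    @Autowired
    private IRequestPartitionHelperSvc myRequestPartitionHelperService;
    @Autowired
    private IResourceTableDao myResourceTableDao;

    public long countResources(RequestDetails theRequest) {
        // Resolve the partition once, before opening the transaction
        ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(null, null);
        RequestPartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, details);
        // Pin the transaction to that partition and run the body inside it
        return myTransactionService
            .withRequest(theRequest)
            .withRequestPartitionId(partitionId)
            .execute(() -> myResourceTableDao.count()); // hypothetical query body
    }
}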
@@ -182,32 +195,32 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B

            List<Long> entityIds;

-           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).map(t->t.getId()).collect(Collectors.toList());
+           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsStringPopulated()).map(t -> t.getId()).collect(Collectors.toList());
            if (entityIds.size() > 0) {
                preFetchIndexes(entityIds, "string", "myParamsString", null);
            }

-           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).map(t->t.getId()).collect(Collectors.toList());
+           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsTokenPopulated()).map(t -> t.getId()).collect(Collectors.toList());
            if (entityIds.size() > 0) {
                preFetchIndexes(entityIds, "token", "myParamsToken", null);
            }

-           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).map(t->t.getId()).collect(Collectors.toList());
+           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsDatePopulated()).map(t -> t.getId()).collect(Collectors.toList());
            if (entityIds.size() > 0) {
                preFetchIndexes(entityIds, "date", "myParamsDate", null);
            }

-           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).map(t->t.getId()).collect(Collectors.toList());
+           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isParamsQuantityPopulated()).map(t -> t.getId()).collect(Collectors.toList());
            if (entityIds.size() > 0) {
                preFetchIndexes(entityIds, "quantity", "myParamsQuantity", null);
            }

-           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).map(t->t.getId()).collect(Collectors.toList());
+           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasLinks()).map(t -> t.getId()).collect(Collectors.toList());
            if (entityIds.size() > 0) {
                preFetchIndexes(entityIds, "resourceLinks", "myResourceLinks", null);
            }

-           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasTags()).map(t->t.getId()).collect(Collectors.toList());
+           entityIds = loadedResourceTableEntries.stream().filter(t -> t.isHasTags()).map(t -> t.getId()).collect(Collectors.toList());
            if (entityIds.size() > 0) {
                myResourceTagDao.findByResourceIds(entityIds);
                preFetchIndexes(entityIds, "tags", "myTags", null);

@@ -255,7 +268,7 @@ public abstract class BaseHapiFhirSystemDao<T extends IBaseBundle, MT> extends B
    }

    private void preFetchIndexes(List<Long> theIds, String typeDesc, String fieldName, @Nullable List<ResourceTable> theEntityListToPopulate) {
-       new QueryChunker<Long>().chunk(theIds, ids->{
+       new QueryChunker<Long>().chunk(theIds, ids -> {
            TypedQuery<ResourceTable> query = myEntityManager.createQuery("FROM ResourceTable r LEFT JOIN FETCH r." + fieldName + " WHERE r.myId IN ( :IDS )", ResourceTable.class);
            query.setParameter("IDS", ids);
            List<ResourceTable> indexFetchOutcome = query.getResultList();
@@ -29,6 +29,8 @@ import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.dao.data.IResourceHistoryTableDao;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.ResourceSearchView;
+import ca.uhn.fhir.jpa.esr.ExternallyStoredResourceServiceRegistry;
+import ca.uhn.fhir.jpa.esr.IExternallyStoredResourceService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.BaseTag;

@@ -87,6 +89,8 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
    private PartitionSettings myPartitionSettings;
    @Autowired
    private IPartitionLookupSvc myPartitionLookupSvc;
+   @Autowired
+   private ExternallyStoredResourceServiceRegistry myExternallyStoredResourceServiceRegistry;

    @Override
    public IBaseResource toResource(IBasePersistedResource theEntity, boolean theForHistoryOperation) {

@@ -231,18 +235,29 @@ public class JpaStorageResourceParser implements IJpaStorageResourceParser {
    }

    @SuppressWarnings("unchecked")
-   private <R extends IBaseResource> R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum resourceEncoding, String decodedResourceText, Class<R> resourceType) {
+   private <R extends IBaseResource> R parseResource(IBaseResourceEntity theEntity, ResourceEncodingEnum theResourceEncoding, String theDecodedResourceText, Class<R> theResourceType) {
        R retVal;
-       if (resourceEncoding != ResourceEncodingEnum.DEL) {
+       if (theResourceEncoding == ResourceEncodingEnum.ESR) {
+
+           int colonIndex = theDecodedResourceText.indexOf(':');
+           Validate.isTrue(colonIndex > 0, "Invalid ESR address: %s", theDecodedResourceText);
+           String providerId = theDecodedResourceText.substring(0, colonIndex);
+           String address = theDecodedResourceText.substring(colonIndex + 1);
+           Validate.notBlank(providerId, "No provider ID in ESR address: %s", theDecodedResourceText);
+           Validate.notBlank(address, "No address in ESR address: %s", theDecodedResourceText);
+           IExternallyStoredResourceService provider = myExternallyStoredResourceServiceRegistry.getProvider(providerId);
+           retVal = (R) provider.fetchResource(address);
+
+       } else if (theResourceEncoding != ResourceEncodingEnum.DEL) {

            IParser parser = new TolerantJsonParser(getContext(theEntity.getFhirVersion()), LENIENT_ERROR_HANDLER, theEntity.getId());

            try {
-               retVal = parser.parseResource(resourceType, decodedResourceText);
+               retVal = parser.parseResource(theResourceType, theDecodedResourceText);
            } catch (Exception e) {
                StringBuilder b = new StringBuilder();
                b.append("Failed to parse database resource[");
-               b.append(myFhirContext.getResourceType(resourceType));
+               b.append(myFhirContext.getResourceType(theResourceType));
                b.append("/");
                b.append(theEntity.getIdDt().getIdPart());
                b.append(" (pid ");
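The ESR branch above treats the stored text as an address of the form "providerId:location": everything before the first colon selects a registered provider, and everything after it is opaque to HAPI FHIR and handed to that provider verbatim, so the location may itself contain colons. A small sketch of the split, reusing the same Validate guard as the parser; the example values in the comments are hypothetical:

import org.apache.commons.lang3.Validate;

class EsrAddressExample {
    static String[] splitEsrAddress(String theAddress) {
        // e.g. theAddress = "my-provider:s3://bucket/patient-123.json" (hypothetical)
        int colonIndex = theAddress.indexOf(':');
        Validate.isTrue(colonIndex > 0, "Invalid ESR address: %s", theAddress);
        String providerId = theAddress.substring(0, colonIndex); // "my-provider"
        String location = theAddress.substring(colonIndex + 1);  // "s3://bucket/patient-123.json"
        return new String[]{providerId, location};
    }
}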
@@ -23,6 +23,8 @@ package ca.uhn.fhir.jpa.dao.expunge;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
+import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.entity.Batch2JobInstanceEntity;
import ca.uhn.fhir.jpa.entity.Batch2WorkChunkEntity;

@@ -69,8 +71,10 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.ResourceTag;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
import ca.uhn.fhir.jpa.model.entity.TagDefinition;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.util.CompositeInterceptorBroadcaster;
import ca.uhn.fhir.util.StopWatch;

@@ -96,12 +100,13 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
    @PersistenceContext(type = PersistenceContextType.TRANSACTION)
    protected EntityManager myEntityManager;
+   @Autowired
+   protected IInterceptorBroadcaster myInterceptorBroadcaster;
    @Autowired
    private HapiTransactionService myTxService;
-   @Autowired
-   protected IInterceptorBroadcaster myInterceptorBroadcaster;

    @Autowired
    private MemoryCacheService myMemoryCacheService;
+   @Autowired
+   private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;

    private int deletedResourceEntityCount;

@@ -118,67 +123,71 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
        CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.STORAGE_PRESTORAGE_EXPUNGE_EVERYTHING, hooks);

        ourLog.info("BEGINNING GLOBAL $expunge");
-       myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
+       Propagation propagation = Propagation.REQUIRES_NEW;
+       ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_EXPUNGE);
+       RequestPartitionId requestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequest, details);
+
+       myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> {
            counter.addAndGet(doExpungeEverythingQuery("UPDATE " + TermCodeSystem.class.getSimpleName() + " d SET d.myCurrentVersion = null"));
        });
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionResourceEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamDate.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamNumber.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantityNormalized.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamString.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamToken.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamUri.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamCoords.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboStringUnique.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboTokenNonUnique.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceLink.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchResult.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchInclude.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConceptDesignation.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSet.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptParentChildLink.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElementTarget.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElement.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMap.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConcept.class));
-       myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2WorkChunkEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, Batch2JobInstanceEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionResourceEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageVersionEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, NpmPackageEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchParamPresentEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobFileEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, BulkImportJobEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ForcedId.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamDate.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamNumber.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamQuantityNormalized.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamString.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamToken.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamUri.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedSearchParamCoords.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboStringUnique.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceIndexedComboTokenNonUnique.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceLink.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchResult.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SearchInclude.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConceptDesignation.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSetConcept.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermValueSet.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptParentChildLink.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElementTarget.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroupElement.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMapGroup.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptMap.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptProperty.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConceptDesignation.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermConcept.class, requestPartitionId));
+       myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> {
            for (TermCodeSystem next : myEntityManager.createQuery("SELECT c FROM " + TermCodeSystem.class.getName() + " c", TermCodeSystem.class).getResultList()) {
                next.setCurrentVersion(null);
                myEntityManager.merge(next);
            }
        });
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystem.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTag.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystemVersion.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TermCodeSystem.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, SubscriptionTable.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTag.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTag.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, TagDefinition.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryProvenanceEntity.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceHistoryTable.class, requestPartitionId));
        int counterBefore = counter.get();
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class));
-       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, ResourceTable.class, requestPartitionId));
+       counter.addAndGet(expungeEverythingByTypeWithoutPurging(theRequest, PartitionEntity.class, requestPartitionId));

        deletedResourceEntityCount = counter.get() - counterBefore;

-       myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
+       myTxService.withRequest(theRequest).withPropagation(propagation).withRequestPartitionId(requestPartitionId).execute(() -> {
            counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d"));
        });

        purgeAllCaches();

        ourLog.info("COMPLETED GLOBAL $expunge - Deleted {} rows", counter.get());
    }

@@ -189,42 +198,42 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
    }

    private void purgeAllCaches() {
        myMemoryCacheService.invalidateAllCaches();
    }

-   private int expungeEverythingByTypeWithoutPurging(RequestDetails theRequest, Class<?> theEntityType) {
+   private int expungeEverythingByTypeWithoutPurging(RequestDetails theRequest, Class<?> theEntityType, RequestPartitionId theRequestPartitionId) {
        int outcome = 0;
        while (true) {
            StopWatch sw = new StopWatch();

-           int count = myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).execute(()-> {
+           int count = myTxService.withRequest(theRequest).withPropagation(Propagation.REQUIRES_NEW).withRequestPartitionId(theRequestPartitionId).execute(() -> {
                CriteriaBuilder cb = myEntityManager.getCriteriaBuilder();
                CriteriaQuery<?> cq = cb.createQuery(theEntityType);
                cq.from(theEntityType);
                TypedQuery<?> query = myEntityManager.createQuery(cq);
                query.setMaxResults(1000);
                List<?> results = query.getResultList();
                for (Object result : results) {
                    myEntityManager.remove(result);
                }
                return results.size();
            });

            outcome += count;
            if (count == 0) {
                break;
            }

-           ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw.toString());
+           ourLog.info("Have deleted {} entities of type {} in {}", outcome, theEntityType.getSimpleName(), sw);
        }
        return outcome;
    }

    @Override
    public int expungeEverythingByType(Class<?> theEntityType) {
-       int result = expungeEverythingByTypeWithoutPurging(null, theEntityType);
+       int result = expungeEverythingByTypeWithoutPurging(null, theEntityType, RequestPartitionId.allPartitions());
        purgeAllCaches();
        return result;
    }

    @Override

@@ -235,7 +244,7 @@ public class ExpungeEverythingService implements IExpungeEverythingService {
    private int doExpungeEverythingQuery(String theQuery) {
        StopWatch sw = new StopWatch();
        int outcome = myEntityManager.createQuery(theQuery).executeUpdate();
-       ourLog.debug("SqlQuery affected {} rows in {}: {}", outcome, sw.toString(), theQuery);
+       ourLog.debug("SqlQuery affected {} rows in {}: {}", outcome, sw, theQuery);
        return outcome;
    }
}
@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.entity;
|
|||
*/
|
||||
|
||||
import ca.uhn.fhir.jpa.bulk.export.model.BulkExportJobStatusEnum;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import org.apache.commons.lang3.builder.ToStringBuilder;
|
||||
import org.apache.commons.lang3.builder.ToStringStyle;
|
||||
import org.hl7.fhir.r5.model.InstantType;
|
||||
|
@ -46,6 +47,7 @@ import java.util.ArrayList;
|
|||
import java.util.Collection;
|
||||
import java.util.Date;
|
||||
|
||||
import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH;
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
import static org.apache.commons.lang3.StringUtils.left;
|
||||
|
||||
|
@ -58,7 +60,7 @@ import static org.apache.commons.lang3.StringUtils.left;
|
|||
* See the BulkExportAppCtx for job details
|
||||
*/
|
||||
@Entity
|
||||
@Table(name = "HFJ_BLK_EXPORT_JOB", uniqueConstraints = {
|
||||
@Table(name = BulkExportJobEntity.HFJ_BLK_EXPORT_JOB, uniqueConstraints = {
|
||||
@UniqueConstraint(name = "IDX_BLKEX_JOB_ID", columnNames = "JOB_ID")
|
||||
}, indexes = {
|
||||
@Index(name = "IDX_BLKEX_EXPTIME", columnList = "EXP_TIME")
|
||||
|
@ -68,13 +70,15 @@ public class BulkExportJobEntity implements Serializable {
|
|||
|
||||
public static final int REQUEST_LENGTH = 1024;
|
||||
public static final int STATUS_MESSAGE_LEN = 500;
|
||||
public static final String JOB_ID = "JOB_ID";
|
||||
public static final String HFJ_BLK_EXPORT_JOB = "HFJ_BLK_EXPORT_JOB";
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKEXJOB_PID")
|
||||
@SequenceGenerator(name = "SEQ_BLKEXJOB_PID", sequenceName = "SEQ_BLKEXJOB_PID")
|
||||
@Column(name = "PID")
|
||||
private Long myId;
|
||||
|
||||
@Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false)
|
||||
@Column(name = JOB_ID, length = UUID_LENGTH, nullable = false)
|
||||
private String myJobId;
|
||||
|
||||
@Enumerated(EnumType.STRING)
|
||||
|
|
|
@@ -40,21 +40,24 @@ import javax.persistence.Version;
import java.io.Serializable;
import java.util.Date;

+import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH;
import static org.apache.commons.lang3.StringUtils.left;

@Entity
-@Table(name = "HFJ_BLK_IMPORT_JOB", uniqueConstraints = {
+@Table(name = BulkImportJobEntity.HFJ_BLK_IMPORT_JOB, uniqueConstraints = {
    @UniqueConstraint(name = "IDX_BLKIM_JOB_ID", columnNames = "JOB_ID")
})
public class BulkImportJobEntity implements Serializable {

+   public static final String HFJ_BLK_IMPORT_JOB = "HFJ_BLK_IMPORT_JOB";
+   public static final String JOB_ID = "JOB_ID";
    @Id
    @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_BLKIMJOB_PID")
    @SequenceGenerator(name = "SEQ_BLKIMJOB_PID", sequenceName = "SEQ_BLKIMJOB_PID")
    @Column(name = "PID")
    private Long myId;

-   @Column(name = "JOB_ID", length = Search.UUID_COLUMN_LENGTH, nullable = false, updatable = false)
+   @Column(name = JOB_ID, length = UUID_LENGTH, nullable = false, updatable = false)
    private String myJobId;
    @Column(name = "JOB_DESC", nullable = true, length = BulkExportJobEntity.STATUS_MESSAGE_LEN)
    private String myJobDescription;
@@ -24,6 +24,7 @@ import ca.uhn.fhir.rest.api.Constants;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
+import org.hl7.fhir.r4.model.InstantType;

import javax.persistence.Column;
import javax.persistence.Entity;

@@ -149,8 +150,8 @@ public class ResourceReindexJobEntity implements Serializable {
        ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE)
            .append("id", myId)
            .append("resourceType", myResourceType)
-           .append("thresholdLow", myThresholdLow)
-           .append("thresholdHigh", myThresholdHigh);
+           .append("thresholdLow", new InstantType(myThresholdLow))
+           .append("thresholdHigh", new InstantType(myThresholdHigh));
        if (myDeleted) {
            b.append("deleted", myDeleted);
        }
@@ -76,13 +76,18 @@ import static org.apache.commons.lang3.StringUtils.left;
})
public class Search implements ICachedSearchDetails, Serializable {

+   /**
+    * Long enough to accommodate a full UUID (36) with an additional prefix
+    * used by megascale (12)
+    */
    @SuppressWarnings("WeakerAccess")
-   public static final int UUID_COLUMN_LENGTH = 36;
+   public static final int SEARCH_UUID_COLUMN_LENGTH = 48;
+   public static final String HFJ_SEARCH = "HFJ_SEARCH";
    private static final int MAX_SEARCH_QUERY_STRING = 10000;
    private static final int FAILURE_MESSAGE_LENGTH = 500;
    private static final long serialVersionUID = 1L;
    private static final Logger ourLog = LoggerFactory.getLogger(Search.class);
+   public static final String SEARCH_UUID = "SEARCH_UUID";
    @Temporal(TemporalType.TIMESTAMP)
    @Column(name = "CREATED", nullable = false, updatable = false)
    private Date myCreated;

@@ -136,7 +141,7 @@ public class Search implements ICachedSearchDetails, Serializable {
    private SearchStatusEnum myStatus;
    @Column(name = "TOTAL_COUNT", nullable = true)
    private Integer myTotalCount;
-   @Column(name = "SEARCH_UUID", length = UUID_COLUMN_LENGTH, nullable = false, updatable = false)
+   @Column(name = SEARCH_UUID, length = SEARCH_UUID_COLUMN_LENGTH, nullable = false, updatable = false)
    private String myUuid;
    @SuppressWarnings("unused")
    @Version

@@ -146,6 +151,9 @@ public class Search implements ICachedSearchDetails, Serializable {
    @Column(name = "SEARCH_PARAM_MAP", nullable = true)
    private byte[] mySearchParameterMap;

+   @Transient
+   private transient SearchParameterMap mySearchParameterMapTransient;

    /**
     * This isn't currently persisted in the DB as it's only used for offset mode. We could
     * change this if needed in the future.

@@ -363,10 +371,12 @@ public class Search implements ICachedSearchDetails, Serializable {
        myTotalCount = theTotalCount;
    }

+   @Override
    public String getUuid() {
        return myUuid;
    }

+   @Override
    public void setUuid(String theUuid) {
        myUuid = theUuid;
    }

@@ -402,11 +412,26 @@ public class Search implements ICachedSearchDetails, Serializable {
        return myVersion;
    }

+   /**
+    * Note that this is not always set! We set this if we're storing a
+    * Search in {@link SearchStatusEnum#PASSCMPLET} status since we'll need
+    * the map in order to restart, but otherwise we save space and time by
+    * not storing it.
+    */
    public Optional<SearchParameterMap> getSearchParameterMap() {
-       return Optional.ofNullable(mySearchParameterMap).map(t -> SerializationUtils.deserialize(mySearchParameterMap));
+       if (mySearchParameterMapTransient != null) {
+           return Optional.of(mySearchParameterMapTransient);
+       }
+       SearchParameterMap searchParameterMap = null;
+       if (mySearchParameterMap != null) {
+           searchParameterMap = SerializationUtils.deserialize(mySearchParameterMap);
+           mySearchParameterMapTransient = searchParameterMap;
+       }
+       return Optional.ofNullable(searchParameterMap);
    }

    public void setSearchParameterMap(SearchParameterMap theSearchParameterMap) {
+       mySearchParameterMapTransient = theSearchParameterMap;
        mySearchParameterMap = SerializationUtils.serialize(theSearchParameterMap);
    }
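The new transient field above is a memoization: the serialized bytes are deserialized at most once per loaded entity, and the setter primes the cache so a just-stored map is returned without any serialization round trip. A usage sketch of that behaviour, using only the accessors from the diff:

Search search = new Search();
search.setSearchParameterMap(new SearchParameterMap());
SearchParameterMap first = search.getSearchParameterMap().orElseThrow(IllegalStateException::new);
SearchParameterMap second = search.getSearchParameterMap().orElseThrow(IllegalStateException::new);
assert first == second; // same cached instance; SerializationUtils.deserialize() is never re-run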
@@ -0,0 +1,52 @@
package ca.uhn.fhir.jpa.esr;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

public class ExternallyStoredResourceAddress {

    private final String myProviderId;
    private final String myLocation;

    /**
     * Constructor
     *
     * @param theProviderId The ID of the provider which will handle this address. Must match the ID returned by a registered {@link IExternallyStoredResourceService}.
     * @param theLocation   The actual location for the provider to resolve. The format of this string is entirely up to the {@link IExternallyStoredResourceService} and only needs to make sense to it.
     */
    public ExternallyStoredResourceAddress(String theProviderId, String theLocation) {
        myProviderId = theProviderId;
        myLocation = theLocation;
    }

    /**
     * @return The ID of the provider which will handle this address. Must match the ID returned by a registered {@link IExternallyStoredResourceService}.
     */
    public String getProviderId() {
        return myProviderId;
    }

    /**
     * @return The actual location for the provider to resolve. The format of this string is entirely up to the {@link IExternallyStoredResourceService} and only needs to make sense to it.
     */
    public String getLocation() {
        return myLocation;
    }
}
@@ -0,0 +1,39 @@
package ca.uhn.fhir.jpa.esr;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;

public class ExternallyStoredResourceAddressMetadataKey extends ResourceMetadataKeyEnum<ExternallyStoredResourceAddress> {

    /**
     * Singleton instance
     */
    public static final ExternallyStoredResourceAddressMetadataKey INSTANCE = new ExternallyStoredResourceAddressMetadataKey();

    /**
     * Constructor
     */
    private ExternallyStoredResourceAddressMetadataKey() {
        super("ExternallyStoredResourceAddress", ExternallyStoredResourceAddress.class);
    }

}
@@ -0,0 +1,69 @@
package ca.uhn.fhir.jpa.esr;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.apache.commons.lang3.Validate;

import javax.annotation.Nonnull;
import java.util.HashMap;
import java.util.Map;

import static org.apache.commons.lang3.StringUtils.defaultString;

public class ExternallyStoredResourceServiceRegistry {

    private static final String VALID_ID_PATTERN = "[a-zA-Z0-9_.-]+";
    private final Map<String, IExternallyStoredResourceService> myIdToProvider = new HashMap<>();

    /**
     * Registers a new provider. Do not call this method after the server has been started.
     *
     * @param theProvider The provider to register.
     */
    public void registerProvider(@Nonnull IExternallyStoredResourceService theProvider) {
        String id = defaultString(theProvider.getId());
        Validate.isTrue(id.matches(VALID_ID_PATTERN), "Invalid provider ID (must match pattern " + VALID_ID_PATTERN + "): %s", id);
        Validate.isTrue(!myIdToProvider.containsKey(id), "Already have a provider with ID: %s", id);

        myIdToProvider.put(id, theProvider);
    }

    /**
     * Do we have any providers registered?
     */
    public boolean hasProviders() {
        return !myIdToProvider.isEmpty();
    }

    /**
     * Clears all registered providers. This method is mostly intended for unit tests.
     */
    public void clearProviders() {
        myIdToProvider.clear();
    }

    @Nonnull
    public IExternallyStoredResourceService getProvider(@Nonnull String theProviderId) {
        IExternallyStoredResourceService retVal = myIdToProvider.get(theProviderId);
        Validate.notNull(retVal, "Invalid ESR provider ID: %s", theProviderId);
        return retVal;
    }
}
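A registration sketch for this registry, wired as a Spring bean so providers are in place before the server begins handling requests (registerProvider is not meant to be called after startup, per the javadoc). The @Bean wiring and the FileSystemResourceService provider (sketched after the interface below) are illustrative assumptions, not part of this change:

@Bean
public ExternallyStoredResourceServiceRegistry externallyStoredResourceServiceRegistry() {
    ExternallyStoredResourceServiceRegistry registry = new ExternallyStoredResourceServiceRegistry();
    // Provider IDs must match VALID_ID_PATTERN ([a-zA-Z0-9_.-]+) and be unique
    registry.registerProvider(new FileSystemResourceService()); // hypothetical provider
    return registry;
}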
@@ -0,0 +1,41 @@
package ca.uhn.fhir.jpa.esr;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.hl7.fhir.instance.model.api.IBaseResource;

public interface IExternallyStoredResourceService {

    /**
     * Returns the ID of this provider. No two providers may return the same
     * ID, and this provider should always return the same ID.
     */
    String getId();

    /**
     * Fetches the given resource using the given address string
     *
     * @param theAddress The address string is a format that is entirely up to the individual provider. HAPI FHIR
     *                   doesn't try to understand it.
     * @return HAPI FHIR may modify the returned object, so it is important to always return a new object for every call here (careful with caching!)
     */
    IBaseResource fetchResource(String theAddress);
}
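A minimal provider sketch against this interface: it resolves the address as a path on local disk and parses the body with a JSON parser. The file-based storage, the "filesystem" ID, and the R4 context are assumptions for illustration only; the contract (stable ID, fresh object per fetch) comes from the javadoc above:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import org.hl7.fhir.instance.model.api.IBaseResource;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;

public class FileSystemResourceService implements IExternallyStoredResourceService {

    private final FhirContext myFhirContext = FhirContext.forR4Cached();

    @Override
    public String getId() {
        return "filesystem"; // must be stable, unique, and match [a-zA-Z0-9_.-]+
    }

    @Override
    public IBaseResource fetchResource(String theAddress) {
        try {
            String json = Files.readString(Paths.get(theAddress));
            // Parse fresh on every call - HAPI FHIR may modify the returned object
            return myFhirContext.newJsonParser().parseResource(json);
        } catch (IOException e) {
            throw new InternalErrorException("Unable to load ESR body from: " + theAddress, e);
        }
    }
}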
@@ -0,0 +1,34 @@
/**
 * This package stores the mechanism for Externally Stored Resources (ESRs).
 * <p>
 * A normal resource stores its contents in the {@link ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable}
 * entity as text (compressed JSON or otherwise) with one row in that table per version of the
 * resource. An ESR still creates a row in that table, but stores a reference to actual body
 * storage instead.
 * <p>
 * THIS IS AN EXPERIMENTAL API - IT MAY GO AWAY OR CHANGE IN THE FUTURE
 *
 * @since 6.6.0
 */
package ca.uhn.fhir.jpa.esr;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2023 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
@@ -21,6 +21,8 @@ package ca.uhn.fhir.jpa.migrate.tasks;
 */

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.BulkExportJobEntity;
import ca.uhn.fhir.jpa.entity.BulkImportJobEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.migrate.DriverTypeEnum;
import ca.uhn.fhir.jpa.migrate.taskdef.ArbitrarySqlTask;

@@ -31,13 +33,13 @@ import ca.uhn.fhir.jpa.migrate.tasks.api.BaseMigrationTasks;
import ca.uhn.fhir.jpa.migrate.tasks.api.Builder;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
-import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamQuantity;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresentEntity;
+import ca.uhn.fhir.jpa.model.entity.StorageSettings;
import ca.uhn.fhir.util.VersionEnum;

import java.util.Arrays;

@@ -48,6 +50,8 @@ import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;

+import static ca.uhn.fhir.rest.api.Constants.UUID_LENGTH;

@SuppressWarnings({"SqlNoDataSourceInspection", "SpellCheckingInspection"})
public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {

@@ -86,15 +90,13 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
        init600(); // 20211102 -
        init610();
        init620();
        init630();
        init640();
        init660();
    }

    protected void init660() {

        Builder version = forVersion(VersionEnum.V6_6_0);

        // fix Postgres clob types - that stupid oid driver problem is still there
        // BT2_JOB_INSTANCE.PARAMS_JSON_LOB
        version.onTable("BT2_JOB_INSTANCE")

@@ -105,12 +107,25 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
        // BT2_WORK_CHUNK.CHUNK_DATA
        version.onTable("BT2_WORK_CHUNK")
            .migratePostgresTextClobToBinaryClob("20230208.3", "CHUNK_DATA");

+       version
+           .onTable(Search.HFJ_SEARCH)
+           .addColumn("20230215.1", Search.SEARCH_UUID)
+           .nullable()
+           .type(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH);
+       version
+           .onTable(BulkImportJobEntity.HFJ_BLK_IMPORT_JOB)
+           .addColumn("20230215.2", BulkImportJobEntity.JOB_ID)
+           .nullable()
+           .type(ColumnTypeEnum.STRING, UUID_LENGTH);
+       version
+           .onTable(BulkExportJobEntity.HFJ_BLK_EXPORT_JOB)
+           .addColumn("20230215.3", BulkExportJobEntity.JOB_ID)
+           .nullable()
+           .type(ColumnTypeEnum.STRING, UUID_LENGTH);
    }

    protected void init640() {

    }

    protected void init630() {
        Builder version = forVersion(VersionEnum.V6_3_0);

        // start forced_id inline migration

@@ -129,7 +144,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
            .online(true)
            .withColumns("SEARCH_PID")
            .onlyAppliesToPlatforms(NON_AUTOMATIC_FK_INDEX_PLATFORMS);
        ;

    }

@@ -474,10 +489,10 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {

        // Fix for https://github.com/hapifhir/hapi-fhir-jpaserver-starter/issues/328
        version.onTable("NPM_PACKAGE_VER")
-           .modifyColumn("20220501.1","FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);
+           .modifyColumn("20220501.1", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);

        version.onTable("NPM_PACKAGE_VER_RES")
-           .modifyColumn("20220501.2","FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);
+           .modifyColumn("20220501.2", "FHIR_VERSION_ID").nonNullable().withType(ColumnTypeEnum.STRING, 20);

        // Fix for https://gitlab.com/simpatico.ai/cdr/-/issues/3166
        version.onTable("MPI_LINK")

@@ -822,7 +837,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
        // Bulk Import Job
        Builder.BuilderAddTableByColumns blkImportJobTable = version.addTableByColumns("20210410.1", "HFJ_BLK_IMPORT_JOB", "PID");
        blkImportJobTable.addColumn("PID").nonNullable().type(ColumnTypeEnum.LONG);
-       blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, Search.UUID_COLUMN_LENGTH);
+       blkImportJobTable.addColumn("JOB_ID").nonNullable().type(ColumnTypeEnum.STRING, UUID_LENGTH);
        blkImportJobTable.addColumn("JOB_STATUS").nonNullable().type(ColumnTypeEnum.STRING, 10);
        blkImportJobTable.addColumn("STATUS_TIME").nonNullable().type(ColumnTypeEnum.DATE_TIMESTAMP);
        blkImportJobTable.addColumn("STATUS_MESSAGE").nullable().type(ColumnTypeEnum.STRING, 500);

@@ -1500,7 +1515,7 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
        version.onTable("HFJ_SEARCH")
            .addColumn("20190814.6", "SEARCH_PARAM_MAP").nullable().type(ColumnTypeEnum.BLOB);
        version.onTable("HFJ_SEARCH")
-           .modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(ColumnTypeEnum.STRING, 36);
+           .modifyColumn("20190814.7", "SEARCH_UUID").nonNullable().withType(ColumnTypeEnum.STRING, Search.SEARCH_UUID_COLUMN_LENGTH);

        version.onTable("HFJ_SEARCH_PARM").dropThisTable("20190814.8");
@@ -69,7 +69,7 @@ public class JpaConformanceProviderDstu2 extends ServerConformanceProvider {
     * Constructor
     */
    @CoverageIgnore
-   public JpaConformanceProviderDstu2(){
+   public JpaConformanceProviderDstu2() {
        super();
        super.setCache(false);
        setIncludeResourceCounts(true);
@@ -28,6 +28,8 @@ import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.BasePagingProvider;
import org.springframework.beans.factory.annotation.Autowired;

+import javax.annotation.Nullable;

// Note: this class is not annotated with @Service because we want to
// explicitly define it in BaseConfig.java. This is done so that
// implementors can override if they want to.

@@ -51,6 +53,7 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider {

    /**
     * Constructor
     *
+    * @deprecated Use {@link DatabaseBackedPagingProvider} as this constructor has no purpose
     */
    @Deprecated

@@ -60,12 +63,19 @@ public class DatabaseBackedPagingProvider extends BasePagingProvider {

    @Override
    public synchronized IBundleProvider retrieveResultList(RequestDetails theRequestDetails, String theId) {
+       myRequestPartitionHelperSvc.determineReadPartitionForRequestForSearchType(theRequestDetails, "Bundle", null, null);
        PersistedJpaBundleProvider provider = myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, theId);
-       if (!provider.ensureSearchEntityLoaded()) {
+       return validateAndReturnBundleProvider(provider);
+   }
+
+   /**
+    * Subclasses may override in order to modify the bundle provider being returned
+    */
+   @Nullable
+   protected PersistedJpaBundleProvider validateAndReturnBundleProvider(PersistedJpaBundleProvider theBundleProvider) {
+       if (!theBundleProvider.ensureSearchEntityLoaded()) {
            return null;
        }
-       return provider;
+       return theBundleProvider;
    }

    @Override
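The new validateAndReturnBundleProvider() hook gives subclasses a seam to observe or swap the provider before it is handed back to the paging layer, as the javadoc above invites. A sketch of such a subclass; the logging is an assumption, and only the override point comes from the class above:

import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingPagingProvider extends DatabaseBackedPagingProvider {

    private static final Logger ourLog = LoggerFactory.getLogger(LoggingPagingProvider.class);

    @Override
    protected PersistedJpaBundleProvider validateAndReturnBundleProvider(PersistedJpaBundleProvider theBundleProvider) {
        PersistedJpaBundleProvider provider = super.validateAndReturnBundleProvider(theBundleProvider);
        if (provider == null) {
            // Search expired or never existed; the server will report the page as gone
            ourLog.debug("Rejected stale page request");
        }
        return provider;
    }
}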
@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.interceptor.api.HookParams;
|
||||
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
|
||||
import ca.uhn.fhir.interceptor.api.Pointcut;
|
||||
import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
|
||||
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
|
||||
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
|
||||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
|
@ -40,9 +41,10 @@ import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
|
|||
import ca.uhn.fhir.jpa.model.dao.JpaPid;
|
||||
import ca.uhn.fhir.jpa.model.entity.BaseHasResource;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
|
||||
import ca.uhn.fhir.jpa.partition.RequestPartitionHelperSvc;
|
||||
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
|
||||
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
|
||||
import ca.uhn.fhir.jpa.search.cache.SearchCacheStatusEnum;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.util.MemoryCacheService;
|
||||
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
|
||||
import ca.uhn.fhir.model.primitive.InstantDt;
|
||||
|
@ -98,7 +100,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
|
|||
@Autowired
|
||||
private ISearchCacheSvc mySearchCacheSvc;
|
||||
@Autowired
|
||||
private RequestPartitionHelperSvc myRequestPartitionHelperSvc;
|
||||
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;
|
||||
@Autowired
|
||||
private JpaStorageSettings myStorageSettings;
|
||||
@Autowired
|
||||
|
@ -130,6 +132,11 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
|
|||
myUuid = theSearch.getUuid();
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
public void setRequestPartitionHelperSvcForUnitTest(IRequestPartitionHelperSvc theRequestPartitionHelperSvc) {
|
||||
myRequestPartitionHelperSvc = theRequestPartitionHelperSvc;
|
||||
}
|
||||
|
||||
protected Search getSearchEntity() {
|
||||
return mySearchEntity;
|
||||
}
|
||||
|
@ -148,7 +155,7 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
|
|||
HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(),
|
||||
mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
|
||||
|
||||
RequestPartitionId partitionId = getRequestPartitionIdForHistory();
|
||||
RequestPartitionId partitionId = getRequestPartitionId();
|
||||
List<ResourceHistoryTable> results = historyBuilder.fetchEntities(partitionId, theOffset, theFromIndex,
|
||||
theToIndex, mySearchEntity.getHistorySearchStyle());
|
||||
|
||||
|
@ -194,18 +201,26 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
|
|||
}
|
||||
|
||||
@Nonnull
|
||||
private RequestPartitionId getRequestPartitionIdForHistory() {
|
||||
protected final RequestPartitionId getRequestPartitionId() {
|
||||
if (myRequestPartitionId == null) {
|
||||
if (mySearchEntity.getResourceId() != null) {
|
||||
// If we have an ID, we've already checked the partition and made sure it's appropriate
|
||||
myRequestPartitionId = RequestPartitionId.allPartitions();
|
||||
ReadPartitionIdRequestDetails details;
|
||||
if (mySearchEntity == null) {
|
||||
details = ReadPartitionIdRequestDetails.forSearchUuid(myUuid);
|
||||
} else if (mySearchEntity.getSearchType() == SearchTypeEnum.HISTORY) {
|
||||
details = ReadPartitionIdRequestDetails.forHistory(mySearchEntity.getResourceType(), null);
} else {
myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, mySearchEntity.getResourceType(), null);
SearchParameterMap params = mySearchEntity.getSearchParameterMap().orElse(null);
details = ReadPartitionIdRequestDetails.forSearchType(mySearchEntity.getResourceType(), params, null);
}
myRequestPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(myRequest, details);
}
return myRequestPartitionId;
}

public void setRequestPartitionId(RequestPartitionId theRequestPartitionId) {
myRequestPartitionId = theRequestPartitionId;
}

protected List<IBaseResource> doSearchOrEverything(final int theFromIndex, final int theToIndex) {
if (mySearchEntity.getTotalCount() != null && mySearchEntity.getNumFound() <= 0) {
// No resources to fetch (e.g. we did a _summary=count search)

@ -217,8 +232,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider {

final ISearchBuilder sb = mySearchBuilderFactory.newSearchBuilder(dao, resourceName, resourceType);

final List<JpaPid> pidsSubList = mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest);
return myTxService.withRequest(myRequest).execute(() -> toResourceList(sb, pidsSubList));
RequestPartitionId requestPartitionId = getRequestPartitionId();
final List<JpaPid> pidsSubList = mySearchCoordinatorSvc.getResources(myUuid, theFromIndex, theToIndex, myRequest, requestPartitionId);
return myTxService
.withRequest(myRequest)
.withRequestPartitionId(requestPartitionId)
.execute(() -> {
return toResourceList(sb, pidsSubList);
});
}

/**

@ -226,7 +247,10 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
*/
public boolean ensureSearchEntityLoaded() {
if (mySearchEntity == null) {
Optional<Search> searchOpt = myTxService.withRequest(myRequest).execute(() -> mySearchCacheSvc.fetchByUuid(myUuid));
Optional<Search> searchOpt = myTxService
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.execute(() -> mySearchCacheSvc.fetchByUuid(myUuid));
if (!searchOpt.isPresent()) {
return false;
}

@ -263,11 +287,14 @@ public class PersistedJpaBundleProvider implements IBundleProvider {
key = MemoryCacheService.HistoryCountKey.forSystem();
}

Function<MemoryCacheService.HistoryCountKey, Integer> supplier = k -> myTxService.withRequest(myRequest).execute(() -> {
HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
Long count = historyBuilder.fetchCount(getRequestPartitionIdForHistory());
return count.intValue();
});
Function<MemoryCacheService.HistoryCountKey, Integer> supplier = k -> myTxService
.withRequest(myRequest)
.withRequestPartitionId(getRequestPartitionId())
.execute(() -> {
HistoryBuilder historyBuilder = myHistoryBuilderFactory.newHistoryBuilder(mySearchEntity.getResourceType(), mySearchEntity.getResourceId(), mySearchEntity.getLastUpdatedLow(), mySearchEntity.getLastUpdatedHigh());
Long count = historyBuilder.fetchCount(getRequestPartitionId());
return count.intValue();
});

boolean haveOffset = mySearchEntity.getLastUpdatedLow() != null || mySearchEntity.getLastUpdatedHigh() != null;
@ -307,7 +334,10 @@ public class PersistedJpaBundleProvider implements IBundleProvider {

switch (mySearchEntity.getSearchType()) {
case HISTORY:
return myTxService.withRequest(myRequest).execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex));
return myTxService
.withRequest(myRequest)
.withRequestPartitionId(getRequestPartitionId())
.execute(() -> doHistoryInTransaction(mySearchEntity.getOffset(), theFromIndex, theToIndex));
case SEARCH:
case EVERYTHING:
default:

@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.search;
* #L%
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.config.JpaConfig;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search;

@ -52,16 +53,16 @@ public class PersistedJpaBundleProviderFactory {
return (PersistedJpaBundleProvider) retVal;
}

public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder) {
return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder);
public PersistedJpaSearchFirstPageBundleProvider newInstanceFirstPage(RequestDetails theRequestDetails, Search theSearch, SearchTask theTask, ISearchBuilder theSearchBuilder, RequestPartitionId theRequestPartitionId) {
return (PersistedJpaSearchFirstPageBundleProvider) myApplicationContext.getBean(JpaConfig.PERSISTED_JPA_SEARCH_FIRST_PAGE_BUNDLE_PROVIDER, theRequestDetails, theSearch, theTask, theSearchBuilder, theRequestPartitionId);
}


public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset) {
return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null);
public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, RequestPartitionId theRequestPartitionId) {
return history(theRequest, theResourceType, theResourcePid, theRangeStartInclusive, theRangeEndInclusive, theOffset, null, theRequestPartitionId);
}

public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType) {
public IBundleProvider history(RequestDetails theRequest, String theResourceType, Long theResourcePid, Date theRangeStartInclusive, Date theRangeEndInclusive, Integer theOffset, HistorySearchStyleEnum searchParameterType, RequestPartitionId theRequestPartitionId) {
String resourceName = defaultIfBlank(theResourceType, null);

Search search = new Search();

@ -76,7 +77,10 @@ public class PersistedJpaBundleProviderFactory {
search.setStatus(SearchStatusEnum.FINISHED);
search.setHistorySearchStyle(searchParameterType);

return newInstance(theRequest, search);
PersistedJpaBundleProvider provider = newInstance(theRequest, search);
provider.setRequestPartitionId(theRequestPartitionId);

return provider;
}

}

@ -20,17 +20,17 @@ package ca.uhn.fhir.jpa.search;
* #L%
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.ISearchBuilder;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchTypeEnum;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchStatusEnum;
import ca.uhn.fhir.jpa.search.builder.tasks.SearchTask;
import ca.uhn.fhir.jpa.util.QueryParameterUtils;
import ca.uhn.fhir.model.api.IResource;
import ca.uhn.fhir.model.api.ResourceMetadataKeyEnum;
import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@ -48,11 +48,15 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
/**
* Constructor
*/
public PersistedJpaSearchFirstPageBundleProvider(Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestDetails theRequest) {
public PersistedJpaSearchFirstPageBundleProvider(Search theSearch, SearchTask theSearchTask, ISearchBuilder theSearchBuilder, RequestDetails theRequest, RequestPartitionId theRequestPartitionId) {
super(theRequest, theSearch.getUuid());

assert theSearch.getSearchType() != SearchTypeEnum.HISTORY;

setSearchEntity(theSearch);
mySearchTask = theSearchTask;
mySearchBuilder = theSearchBuilder;
super.setRequestPartitionId(theRequestPartitionId);
}

@Nonnull

@ -67,9 +71,12 @@ public class PersistedJpaSearchFirstPageBundleProvider extends PersistedJpaBundl
final List<JpaPid> pids = mySearchTask.getResourcePids(theFromIndex, theToIndex);
ourLog.trace("Done fetching search resource PIDs");

List<IBaseResource> retVal = myTxService.withRequest(myRequest).execute(() -> {
return toResourceList(mySearchBuilder, pids);
});
RequestPartitionId requestPartitionId = getRequestPartitionId();

List<IBaseResource> retVal = myTxService
.withRequest(myRequest)
.withRequestPartitionId(requestPartitionId)
.execute(() -> toResourceList(mySearchBuilder, pids));

long totalCountWanted = theToIndex - theFromIndex;
long totalCountMatch = (int) retVal
@ -69,7 +69,7 @@ import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.time.DateUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.BeanFactory;
import org.springframework.data.domain.AbstractPageRequest;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Sort;
import org.springframework.stereotype.Component;

@ -108,7 +108,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
private final SearchBuilderFactory<JpaPid> mySearchBuilderFactory;
private final ISynchronousSearchSvc mySynchronousSearchSvc;
private final PersistedJpaBundleProviderFactory myPersistedJpaBundleProviderFactory;
private final IRequestPartitionHelperSvc myRequestPartitionHelperService;
private final ISearchParamRegistry mySearchParamRegistry;
private final SearchStrategyFactory mySearchStrategyFactory;
private final ExceptionService myExceptionSvc;

@ -154,7 +153,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
mySearchBuilderFactory = theSearchBuilderFactory;
mySynchronousSearchSvc = theSynchronousSearchSvc;
myPersistedJpaBundleProviderFactory = thePersistedJpaBundleProviderFactory;
myRequestPartitionHelperService = theRequestPartitionHelperService;
mySearchParamRegistry = theSearchParamRegistry;
mySearchStrategyFactory = theSearchStrategyFactory;
myExceptionSvc = theExceptionSvc;
@ -201,9 +199,19 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
/**
* This method is called by the HTTP client processing thread in order to
* fetch resources.
* <p>
* This method must not be called from inside a transaction. The rationale is that
* the {@link Search} entity is treated as a piece of shared state across client threads
* accessing the same search, so we need to be able to update that table in a transaction
* and commit it right away in order for that to work. Examples of needing to do this
* include if two different clients request the same search and are both paging at the
* same time, but also includes clients that are hacking the paging links to
* fetch multiple pages of a search result in parallel. In both cases we need to only
* let one of them actually activate the search, or we will have conflicts. The other thread
* just needs to wait until the first one actually fetches more results.
*/
@Override
public List<JpaPid> getResources(final String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails) {
public List<JpaPid> getResources(final String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
assert !TransactionSynchronizationManager.isActualTransactionActive();

// If we're actively searching right now, don't try to do anything until at least one batch has been
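To make the contract above concrete, a minimal caller sketch (illustrative only; searchCoordinatorSvc, searchUuid, requestDetails, and requestPartitionId are assumed names, not part of this change): the page fetch happens outside any active transaction, and the coordinator opens and commits its own transactions internally.

// Illustrative caller sketch - not part of this diff
assert !TransactionSynchronizationManager.isActualTransactionActive();
List<JpaPid> page = searchCoordinatorSvc.getResources(searchUuid, 0, 50, requestDetails, requestPartitionId);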
@ -240,13 +248,12 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
Callable<Search> searchCallback = () -> mySearchCacheSvc
.fetchByUuid(theUuid)
.orElseThrow(() -> myExceptionSvc.newUnknownSearchException(theUuid));
if (theRequestDetails != null) {
search = myTxService.withRequest(theRequestDetails).execute(searchCallback);
} else {
search = HapiTransactionService.invokeCallableAndHandleAnyException(searchCallback);
}

search = myTxService
.withRequest(theRequestDetails)
.withRequestPartitionId(theRequestPartitionId)
.execute(searchCallback);
QueryParameterUtils.verifySearchHasntFailedOrThrowInternalErrorException(search);

if (search.getStatus() == SearchStatusEnum.FINISHED) {
ourLog.trace("Search entity marked as finished with {} results", search.getNumFound());
break;

@ -272,7 +279,6 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
String resourceType = search.getResourceType();
SearchParameterMap params = search.getSearchParameterMap().orElseThrow(() -> new IllegalStateException("No map in PASSCOMPLET search"));
IFhirResourceDao<?> resourceDao = myDaoRegistry.getResourceDao(resourceType);
RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequestForSearchType(theRequestDetails, resourceType, params, null);

SearchTaskParameters parameters = new SearchTaskParameters(
search,

@ -280,7 +286,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
params,
resourceType,
theRequestDetails,
requestPartitionId,
theRequestPartitionId,
myOnRemoveSearchTask,
mySyncSize
);

@ -299,7 +305,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {

ourLog.trace("Finished looping");

List<JpaPid> pids = fetchResultPids(theUuid, theFrom, theTo, theRequestDetails, search);
List<JpaPid> pids = fetchResultPids(theUuid, theFrom, theTo, theRequestDetails, search, theRequestPartitionId);

ourLog.trace("Fetched {} results", pids.size());

@ -307,8 +313,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
}

@Nonnull
private List<JpaPid> fetchResultPids(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, Search theSearch) {
List<JpaPid> pids = myTxService.withRequest(theRequestDetails).execute(() -> mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo));
private List<JpaPid> fetchResultPids(String theUuid, int theFrom, int theTo, @Nullable RequestDetails theRequestDetails, Search theSearch, RequestPartitionId theRequestPartitionId) {
List<JpaPid> pids = mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo, theRequestDetails, theRequestPartitionId);
if (pids == null) {
throw myExceptionSvc.newUnknownSearchException(theUuid);
}

@ -325,7 +331,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
Search search = new Search();
QueryParameterUtils.populateSearchEntity(theParams, theResourceType, searchUuid, queryString, search, theRequestPartitionId);

myStorageInterceptorHooks.callStoragePresearchRegistered(theRequestDetails, theParams, search);
myStorageInterceptorHooks.callStoragePresearchRegistered(theRequestDetails, theParams, search, theRequestPartitionId);

validateSearch(theParams);

@ -483,7 +489,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
myIdToSearchTask.put(theSearch.getUuid(), task);
task.call();

PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb);
PersistedJpaSearchFirstPageBundleProvider retVal = myPersistedJpaBundleProviderFactory.newInstanceFirstPage(theRequestDetails, theSearch, task, theSb, theRequestPartitionId);

ourLog.debug("Search initial phase completed in {}ms", w.getMillis());
return retVal;

@ -492,34 +498,37 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
@Nullable
private PersistedJpaBundleProvider findCachedQuery(SearchParameterMap theParams, String theResourceType, RequestDetails theRequestDetails, String theQueryString, RequestPartitionId theRequestPartitionId) {
// May be null
return myTxService.withRequest(theRequestDetails).execute(() -> {
return myTxService
.withRequest(theRequestDetails)
.withRequestPartitionId(theRequestPartitionId)
.execute(() -> {

// Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH
HookParams params = new HookParams()
.add(SearchParameterMap.class, theParams)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
if (Boolean.FALSE.equals(outcome)) {
return null;
}
// Interceptor call: STORAGE_PRECHECK_FOR_CACHED_SEARCH
HookParams params = new HookParams()
.add(SearchParameterMap.class, theParams)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
Object outcome = CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRECHECK_FOR_CACHED_SEARCH, params);
if (Boolean.FALSE.equals(outcome)) {
return null;
}

// Check for a search matching the given hash
Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId);
if (searchToUse == null) {
return null;
}
// Check for a search matching the given hash
Search searchToUse = findSearchToUseOrNull(theQueryString, theResourceType, theRequestPartitionId);
if (searchToUse == null) {
return null;
}

ourLog.debug("Reusing search {} from cache", searchToUse.getUuid());
// Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED
params = new HookParams()
.add(SearchParameterMap.class, theParams)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);
ourLog.debug("Reusing search {} from cache", searchToUse.getUuid());
// Interceptor call: JPA_PERFTRACE_SEARCH_REUSING_CACHED
params = new HookParams()
.add(SearchParameterMap.class, theParams)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_SEARCH_REUSING_CACHED, params);

return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid());
});
return myPersistedJpaBundleProviderFactory.newInstance(theRequestDetails, searchToUse.getUuid());
});
}

@Nullable
@ -563,7 +572,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {

int pageIndex = theFromIndex / pageSize;

Pageable page = new AbstractPageRequest(pageIndex, pageSize) {
return new PageRequest(pageIndex, pageSize, Sort.unsorted()) {
private static final long serialVersionUID = 1L;

@Override

@ -571,33 +580,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc<JpaPid> {
return theFromIndex;
}

@Override
public Sort getSort() {
return Sort.unsorted();
}

@Override
public Pageable next() {
return null;
}

@Override
public Pageable previous() {
return null;
}

@Override
public Pageable first() {
return null;
}

@Override
public Pageable withPage(int theI) {
return null;
}
};

return page;
}

}
@ -36,6 +36,7 @@ import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.interceptor.JpaPreResourceAccessDetails;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.Constants;

@ -85,6 +86,9 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
@Autowired
private EntityManager myEntityManager;

@Autowired
private IRequestPartitionHelperSvc myRequestPartitionHelperSvc;

private int mySyncSize = 250;

@Override

@ -97,8 +101,11 @@ public class SynchronousSearchSvcImpl implements ISynchronousSearchSvc {
boolean wantCount = theParamWantOnlyCount || theParamOrConfigWantCount;

// Execute the query and make sure we return distinct results

return myTxService.withRequest(theRequestDetails).readOnly().execute(() -> {
return myTxService
.withRequest(theRequestDetails)
.withRequestPartitionId(theRequestPartitionId)
.readOnly()
.execute(() -> {

// Load the results synchronously
final List<JpaPid> pids = new ArrayList<>();

@ -878,7 +878,7 @@ public class SearchBuilder implements ISearchBuilder<JpaPid> {
resourceId.setVersion(version);
if (version != null && !version.equals(next.getVersion())) {
IFhirResourceDao<? extends IBaseResource> dao = myDaoRegistry.getResourceDao(resourceType);
next = dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null);
next = (IBaseResourceEntity) dao.readEntity(next.getIdDt().withVersion(Long.toString(version)), null);
}
}

@ -23,8 +23,8 @@ package ca.uhn.fhir.jpa.search.builder;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.search.SearchCoordinatorSvcImpl;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
@ -46,13 +46,15 @@ public class StorageInterceptorHooksFacade {
* @param theRequestDetails
* @param theParams
* @param search
* @param theRequestPartitionId
*/
public void callStoragePresearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theParams, Search search) {
public void callStoragePresearchRegistered(RequestDetails theRequestDetails, SearchParameterMap theParams, Search search, RequestPartitionId theRequestPartitionId) {
HookParams params = new HookParams()
.add(ICachedSearchDetails.class, search)
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(SearchParameterMap.class, theParams);
.add(SearchParameterMap.class, theParams)
.add(RequestPartitionId.class, theRequestPartitionId);
CompositeInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequestDetails, Pointcut.STORAGE_PRESEARCH_REGISTERED, params);
}
//private IInterceptorBroadcaster myInterceptorBroadcaster;
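Since the facade now adds the partition to the hook parameters, a registered interceptor can receive it directly. A minimal sketch, assuming a custom interceptor class (the class name and the parameter subset shown are illustrative; HAPI FHIR hooks may accept a subset of a pointcut's declared parameters):

@Interceptor
public class PresearchPartitionLogger {
    private static final Logger ourLog = LoggerFactory.getLogger(PresearchPartitionLogger.class);

    @Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
    public void presearchRegistered(ICachedSearchDetails theSearch, RequestPartitionId thePartitionId) {
        // Log which partition the newly registered search will execute against
        ourLog.info("Search {} registered for partition {}", theSearch.getUuid(), thePartitionId);
    }
}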
@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.search.builder.tasks;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.dao.SearchBuilderFactory;
import ca.uhn.fhir.jpa.model.dao.JpaPid;

@ -74,8 +75,12 @@ public class SearchContinuationTask extends SearchTask {
@Override
public Void call() {
try {
myTxService.withRequest(myRequestDetails).execute(() -> {
List<JpaPid> previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch());
RequestPartitionId requestPartitionId = getRequestPartitionId();
myTxService
.withRequest(myRequestDetails)
.withRequestPartitionId(requestPartitionId)
.execute(() -> {
List<JpaPid> previouslyAddedResourcePids = mySearchResultCacheSvc.fetchAllResultPids(getSearch(), myRequestDetails, requestPartitionId);
if (previouslyAddedResourcePids == null) {
throw myExceptionSvc.newUnknownSearchException(getSearch().getUuid());
}

@ -92,7 +92,10 @@ import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;
public class SearchTask implements Callable<Void> {

private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(SearchTask.class);

// injected beans
protected final HapiTransactionService myTxService;
protected final FhirContext myContext;
protected final ISearchResultCacheSvc mySearchResultCacheSvc;
private final SearchParameterMap myParams;
private final IDao myCallingDao;
private final String myResourceType;

@ -104,6 +107,14 @@ public class SearchTask implements Callable<Void> {
private final RequestPartitionId myRequestPartitionId;
private final SearchRuntimeDetails mySearchRuntimeDetails;
private final Transaction myParentTransaction;
private final Consumer<String> myOnRemove;
private final int mySyncSize;
private final Integer myLoadingThrottleForUnitTests;
private final IInterceptorBroadcaster myInterceptorBroadcaster;
private final SearchBuilderFactory<JpaPid> mySearchBuilderFactory;
private final JpaStorageSettings myStorageSettings;
private final ISearchCacheSvc mySearchCacheSvc;
private final IPagingProvider myPagingProvider;
private Search mySearch;
private boolean myAbortRequested;
private int myCountSavedTotal = 0;

@ -112,22 +123,6 @@ public class SearchTask implements Callable<Void> {
private boolean myAdditionalPrefetchThresholdsRemaining;
private List<JpaPid> myPreviouslyAddedResourcePids;
private Integer myMaxResultsToFetch;

private final Consumer<String> myOnRemove;

private final int mySyncSize;
private final Integer myLoadingThrottleForUnitTests;

// injected beans
protected final HapiTransactionService myTxService;
protected final FhirContext myContext;
private final IInterceptorBroadcaster myInterceptorBroadcaster;
private final SearchBuilderFactory<JpaPid> mySearchBuilderFactory;
protected final ISearchResultCacheSvc mySearchResultCacheSvc;
private final JpaStorageSettings myStorageSettings;
private final ISearchCacheSvc mySearchCacheSvc;
private final IPagingProvider myPagingProvider;

/**
* Constructor
*/

@ -169,6 +164,10 @@ public class SearchTask implements Callable<Void> {
myParentTransaction = ElasticApm.currentTransaction();
}

protected RequestPartitionId getRequestPartitionId() {
return myRequestPartitionId;
}

/**
* This method is called by the server HTTP thread, and
* will block until at least one page of results has been
@ -267,11 +266,18 @@ public class SearchTask implements Callable<Void> {
}

public void saveSearch() {
myTxService.execute(myRequest, null, Propagation.REQUIRES_NEW, Isolation.DEFAULT, ()->doSaveSearch());
myTxService
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.withPropagation(Propagation.REQUIRES_NEW)
.execute(() -> doSaveSearch());
}

private void saveUnsynced(final IResultIterator theResultIter) {
myTxService.withRequest(myRequest).execute(()->{
myTxService
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.execute(() -> {
if (mySearch.getId() == null) {
doSaveSearch();
}

@ -303,7 +309,7 @@ public class SearchTask implements Callable<Void> {
// Actually store the results in the query cache storage
myCountSavedTotal += unsyncedPids.size();
myCountSavedThisPass += unsyncedPids.size();
mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids);
mySearchResultCacheSvc.storeResults(mySearch, mySyncedPids, unsyncedPids, myRequest, getRequestPartitionId());

synchronized (mySyncedPids) {
int numSyncedThisPass = unsyncedPids.size();

@ -387,7 +393,11 @@ public class SearchTask implements Callable<Void> {
// Create an initial search in the DB and give it an ID
saveSearch();

myTxService.execute(myRequest, null, Propagation.REQUIRED, Isolation.READ_COMMITTED, ()->doSearch());
myTxService
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.withIsolation(Isolation.READ_COMMITTED)
.execute(() -> doSearch());

mySearchRuntimeDetails.setSearchStatus(mySearch.getStatus());
if (mySearch.getStatus() == SearchStatusEnum.FINISHED) {

@ -506,13 +516,16 @@ public class SearchTask implements Callable<Void> {

ourLog.trace("Got count {}", count);

myTxService.withRequest(myRequest).execute(()->{
mySearch.setTotalCount(count.intValue());
if (myParamWantOnlyCount) {
mySearch.setStatus(SearchStatusEnum.FINISHED);
}
doSaveSearch();
});
myTxService
.withRequest(myRequest)
.withRequestPartitionId(myRequestPartitionId)
.execute(() -> {
mySearch.setTotalCount(count.intValue());
if (myParamWantOnlyCount) {
mySearch.setStatus(SearchStatusEnum.FINISHED);
}
doSaveSearch();
});
if (myParamWantOnlyCount) {
return;
}
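The recurring change in SearchTask is mechanical: calls that previously passed propagation and isolation as positional arguments to myTxService.execute(...) now use the fluent builder, which also carries the partition. A condensed sketch of the pattern (doWork() is a stand-in for the real callback):

myTxService
    .withRequest(myRequest)
    .withRequestPartitionId(myRequestPartitionId)
    .withPropagation(Propagation.REQUIRES_NEW)   // optional, as in saveSearch()
    .withIsolation(Isolation.READ_COMMITTED)     // optional, as in the search body
    .execute(() -> doWork());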
@ -111,7 +111,6 @@ public class DatabaseSearchCacheSvcImpl implements ISearchCacheSvc {
}

@Override
@Transactional(propagation = Propagation.NEVER)
public Optional<Search> tryToMarkSearchAsInProgress(Search theSearch) {
ourLog.trace("Going to try to change search status from {} to {}", theSearch.getStatus(), SearchStatusEnum.LOADING);
try {

@ -20,17 +20,18 @@ package ca.uhn.fhir.jpa.search.cache;
* #L%
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.data.ISearchResultDao;
import ca.uhn.fhir.jpa.dao.tx.IHapiTransactionService;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import com.google.common.collect.Lists;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Pageable;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;

import java.util.Collections;
import java.util.List;
@ -43,50 +44,65 @@ public class DatabaseSearchResultCacheSvcImpl implements ISearchResultCacheSvc {
@Autowired
private ISearchResultDao mySearchResultDao;

@Autowired
private IHapiTransactionService myTransactionService;

@Override
@Transactional(propagation = Propagation.REQUIRED)
public List<JpaPid> fetchResultPids(Search theSearch, int theFrom, int theTo) {
final Pageable page = toPage(theFrom, theTo);
if (page == null) {
return Collections.emptyList();
}
public List<JpaPid> fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
return myTransactionService
.withRequest(theRequestDetails)
.withRequestPartitionId(theRequestPartitionId)
.execute(() -> {
final Pageable page = toPage(theFrom, theTo);
if (page == null) {
return Collections.emptyList();
}

List<Long> retVal = mySearchResultDao
.findWithSearchPid(theSearch.getId(), page)
.getContent();
List<Long> retVal = mySearchResultDao
.findWithSearchPid(theSearch.getId(), page)
.getContent();

ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size());
ourLog.debug("fetchResultPids for range {}-{} returned {} pids", theFrom, theTo, retVal.size());

return JpaPid.fromLongList(retVal);
return JpaPid.fromLongList(retVal);
});
}

@Override
@Transactional(propagation = Propagation.REQUIRED)
public List<JpaPid> fetchAllResultPids(Search theSearch) {
List<Long> retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId());
ourLog.trace("fetchAllResultPids returned {} pids", retVal.size());
return JpaPid.fromLongList(retVal);
public List<JpaPid> fetchAllResultPids(Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
return myTransactionService
.withRequest(theRequestDetails)
.withRequestPartitionId(theRequestPartitionId)
.execute(() -> {
List<Long> retVal = mySearchResultDao.findWithSearchPidOrderIndependent(theSearch.getId());
ourLog.trace("fetchAllResultPids returned {} pids", retVal.size());
return JpaPid.fromLongList(retVal);
});
}

@Override
@Transactional(propagation = Propagation.REQUIRED)
public void storeResults(Search theSearch, List<JpaPid> thePreviouslyStoredResourcePids, List<JpaPid> theNewResourcePids) {
List<SearchResult> resultsToSave = Lists.newArrayList();
public void storeResults(Search theSearch, List<JpaPid> thePreviouslyStoredResourcePids, List<JpaPid> theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId) {
myTransactionService
.withRequest(theRequestDetails)
.withRequestPartitionId(theRequestPartitionId)
.execute(() -> {
List<SearchResult> resultsToSave = Lists.newArrayList();

ourLog.debug("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size());
ourLog.debug("Storing {} results with {} previous for search", theNewResourcePids.size(), thePreviouslyStoredResourcePids.size());

int order = thePreviouslyStoredResourcePids.size();
for (JpaPid nextPid : theNewResourcePids) {
SearchResult nextResult = new SearchResult(theSearch);
nextResult.setResourcePid(nextPid.getId());
nextResult.setOrder(order);
resultsToSave.add(nextResult);
ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid());
int order = thePreviouslyStoredResourcePids.size();
for (JpaPid nextPid : theNewResourcePids) {
SearchResult nextResult = new SearchResult(theSearch);
nextResult.setResourcePid(nextPid.getId());
nextResult.setOrder(order);
resultsToSave.add(nextResult);
ourLog.trace("Saving ORDER[{}] Resource {}", order, nextResult.getResourcePid());

order++;
}
order++;
}

mySearchResultDao.saveAll(resultsToSave);
mySearchResultDao.saveAll(resultsToSave);
});
}

}
@ -20,8 +20,10 @@ package ca.uhn.fhir.jpa.search.cache;
* #L%
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.rest.api.server.RequestDetails;

import javax.annotation.Nullable;
import java.util.List;

@ -30,12 +32,12 @@ public interface ISearchResultCacheSvc {
/**
* @param theSearch The search - This method is not required to persist any changes to the Search object, it is only provided here for identification
* @param thePreviouslyStoredResourcePids A list of resource PIDs that have previously been saved to this search
* @param theNewResourcePids A list of new resoure PIDs to add to this search (these ones have not been previously saved)
* @param theNewResourcePids A list of new resource PIDs to add to this search (these ones have not been previously saved)
*/
void storeResults(Search theSearch, List<JpaPid> thePreviouslyStoredResourcePids, List<JpaPid> theNewResourcePids);
void storeResults(Search theSearch, List<JpaPid> thePreviouslyStoredResourcePids, List<JpaPid> theNewResourcePids, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);

/**
* Fetch a sunset of the search result IDs from the cache
* Fetch a subset of the search result IDs from the cache
*
* @param theSearch The search to fetch IDs for
* @param theFrom The starting index (inclusive)

@ -44,7 +46,7 @@ public interface ISearchResultCacheSvc {
* have been removed from the cache for some reason, such as expiry or manual purge)
*/
@Nullable
List<JpaPid> fetchResultPids(Search theSearch, int theFrom, int theTo);
List<JpaPid> fetchResultPids(Search theSearch, int theFrom, int theTo, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);

/**
* Fetch all result PIDs for a given search with no particular order required

@ -54,6 +56,6 @@ public interface ISearchResultCacheSvc {
* have been removed from the cache for some reason, such as expiry or manual purge)
*/
@Nullable
List<JpaPid> fetchAllResultPids(Search theSearch);
List<JpaPid> fetchAllResultPids(Search theSearch, RequestDetails theRequestDetails, RequestPartitionId theRequestPartitionId);

}
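Both fetch methods remain @Nullable, so callers must treat a null return as an expired or purged search rather than an empty page. A minimal caller sketch mirroring the handling in SearchCoordinatorSvcImpl above (variable names are illustrative):

List<JpaPid> pids = mySearchResultCacheSvc.fetchResultPids(theSearch, theFrom, theTo, theRequestDetails, theRequestPartitionId);
if (pids == null) {
    // Null means the results were removed from the cache (expiry or manual purge)
    throw myExceptionSvc.newUnknownSearchException(theSearch.getUuid());
}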
@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.util;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import org.springframework.orm.jpa.JpaDialect;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.vendor.HibernateJpaDialect;

public class JpaHapiTransactionService extends HapiTransactionService {

private volatile Boolean myCustomIsolationSupported;

@Override
public boolean isCustomIsolationSupported() {
if (myCustomIsolationSupported == null) {
if (myTransactionManager instanceof JpaTransactionManager) {
JpaDialect jpaDialect = ((JpaTransactionManager) myTransactionManager).getJpaDialect();
myCustomIsolationSupported = (jpaDialect instanceof HibernateJpaDialect);
} else {
myCustomIsolationSupported = false;
}
}
return myCustomIsolationSupported;
}

}
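The new subclass answers a single question: whether the underlying transaction manager can honor a custom isolation level (only the Hibernate JPA dialect can, here). A caller sketch under that assumption (the fallback branch is an assumption about how a caller might degrade gracefully, not code from this change):

if (myTxService.isCustomIsolationSupported()) {
    myTxService
        .withRequest(myRequest)
        .withIsolation(Isolation.READ_COMMITTED)
        .execute(() -> doSearch());
} else {
    // Custom isolation is unavailable; fall back to the default isolation level
    myTxService.withRequest(myRequest).execute(() -> doSearch());
}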
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -3,7 +3,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>

@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -44,7 +44,6 @@ import ca.uhn.fhir.mdm.model.MdmTransactionContext;
import ca.uhn.fhir.mdm.provider.MdmControllerHelper;
import ca.uhn.fhir.mdm.provider.MdmControllerUtil;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.provider.ProviderConstants;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;

@ -125,9 +124,8 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
}

@Override
public Page<MdmLinkJson> queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails)
{
RequestPartitionId theReadPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, null);
public Page<MdmLinkJson> queryLinks(MdmQuerySearchParameters theMdmQuerySearchParameters, MdmTransactionContext theMdmTransactionContext, RequestDetails theRequestDetails) {
RequestPartitionId theReadPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null);
Page<MdmLinkJson> resultPage;
if (theReadPartitionId.hasPartitionIds()) {
theMdmQuerySearchParameters.setPartitionIds(theReadPartitionId.getPartitionIds());

@ -166,12 +164,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
@Override
public Page<MdmLinkJson> getDuplicateGoldenResources(MdmTransactionContext theMdmTransactionContext, MdmPageRequest thePageRequest, RequestDetails theRequestDetails, String theRequestResourceType) {
Page<MdmLinkJson> resultPage;
RequestPartitionId readPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, null);
RequestPartitionId readPartitionId = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null);

if (readPartitionId.isAllPartitions()){
resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, theRequestResourceType);
if (readPartitionId.isAllPartitions()) {
resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, null, theRequestResourceType);
} else {
resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, readPartitionId.getPartitionIds(), theRequestResourceType);
resultPage = myMdmLinkQuerySvc.getDuplicateGoldenResources(theMdmTransactionContext, thePageRequest, readPartitionId.getPartitionIds(), theRequestResourceType);
}

validateMdmQueryPermissions(readPartitionId, resultPage.getContent(), theRequestDetails);

@ -205,12 +203,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
public IBaseParameters submitMdmClearJob(@Nonnull List<String> theResourceNames, IPrimitiveType<BigDecimal> theBatchSize, ServletRequestDetails theRequestDetails) {
MdmClearJobParameters params = new MdmClearJobParameters();
params.setResourceNames(theResourceNames);
if (theBatchSize != null && theBatchSize.getValue() !=null && theBatchSize.getValue().longValue() > 0) {
if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) {
params.setBatchSize(theBatchSize.getValue().intValue());
}

ReadPartitionIdRequestDetails details= new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.EXTENDED_OPERATION_SERVER, null, null, null);
RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, null, details);
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forOperation(null, null, ProviderConstants.OPERATION_MDM_CLEAR);
RequestPartitionId requestPartition = myRequestPartitionHelperSvc.determineReadPartitionForRequest(theRequestDetails, details);
params.setRequestPartitionId(requestPartition);

JobInstanceStartRequest request = new JobInstanceStartRequest();

@ -226,10 +224,10 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {


@Override
public IBaseParameters submitMdmSubmitJob(List<String> theUrls, IPrimitiveType<BigDecimal> theBatchSize, ServletRequestDetails theRequestDetails) {
public IBaseParameters submitMdmSubmitJob(List<String> theUrls, IPrimitiveType<BigDecimal> theBatchSize, ServletRequestDetails theRequestDetails) {
MdmSubmitJobParameters params = new MdmSubmitJobParameters();

if (theBatchSize != null && theBatchSize.getValue() !=null && theBatchSize.getValue().longValue() > 0) {
if (theBatchSize != null && theBatchSize.getValue() != null && theBatchSize.getValue().longValue() > 0) {
params.setBatchSize(theBatchSize.getValue().intValue());
}
params.setRequestPartitionId(RequestPartitionId.allPartitions());

@ -256,12 +254,12 @@ public class MdmControllerSvcImpl implements IMdmControllerSvc {
myIMdmLinkUpdaterSvc.notDuplicateGoldenResource(goldenResource, target, theMdmTransactionContext);
}

private void validateMdmQueryPermissions(RequestPartitionId theRequestPartitionId, List<MdmLinkJson> theMdmLinkJsonList, RequestDetails theRequestDetails){
private void validateMdmQueryPermissions(RequestPartitionId theRequestPartitionId, List<MdmLinkJson> theMdmLinkJsonList, RequestDetails theRequestDetails) {
Set<String> seenResourceTypes = new HashSet<>();
for (MdmLinkJson mdmLinkJson : theMdmLinkJsonList){
for (MdmLinkJson mdmLinkJson : theMdmLinkJsonList) {
IdDt idDt = new IdDt(mdmLinkJson.getSourceId());

if (!seenResourceTypes.contains(idDt.getResourceType())){
if (!seenResourceTypes.contains(idDt.getResourceType())) {
myRequestPartitionHelperSvc.validateHasPartitionPermissions(theRequestDetails, idDt.getResourceType(), theRequestPartitionId);
seenResourceTypes.add(idDt.getResourceType());
}
@ -36,7 +36,7 @@ public class MdmProviderMatchR4Test extends BaseProviderR4Test {
}

@Test
public void testMatch() throws Exception {
public void testMatch() {
Patient jane = buildJanePatient();
jane.setActive(true);
Patient createdJane = createPatient(jane);
@ -32,6 +32,8 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.mock.mockito.SpyBean;
import org.springframework.data.domain.Page;

@ -41,6 +43,7 @@ import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;

import static ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus.DEFAULT_PAGE_SIZE;
import static ca.uhn.fhir.mdm.provider.MdmProviderDstu3Plus.MAX_PAGE_SIZE;

@ -50,6 +53,8 @@ import static org.mockito.ArgumentMatchers.argThat;
import static org.mockito.ArgumentMatchers.eq;

public class MdmControllerSvcImplTest extends BaseLinkR4Test {
private static final Logger ourLog = LoggerFactory.getLogger(MdmControllerSvcImplTest.class);

@Autowired
IMdmControllerSvc myMdmControllerSvc;

@ -137,13 +142,18 @@ public class MdmControllerSvcImplTest extends BaseLinkR4Test {
assertEquals(MdmLinkSourceEnum.AUTO, link.getLinkSource());
assertLinkCount(2);

runInTransaction(()->{
ourLog.info("Links: {}", myMdmLinkDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * ")));
});

myCaptureQueriesListener.clear();
Page<MdmLinkJson> resultPage = myMdmControllerSvc.getDuplicateGoldenResources(null,
new MdmPageRequest((Integer) null, null, DEFAULT_PAGE_SIZE, MAX_PAGE_SIZE),
new SystemRequestDetails().setRequestPartitionId(RequestPartitionId.fromPartitionId(1)), null);
myCaptureQueriesListener.logSelectQueries();

assertEquals(resultPage.getContent().size(), 1);

assertEquals(resultPage.getContent().get(0).getSourceId(), patient.getIdElement().getResourceType() + "/" + patient.getIdElement().getIdPart());
assertEquals(1, resultPage.getContent().size());
assertEquals(patient.getIdElement().getResourceType() + "/" + patient.getIdElement().getIdPart(), resultPage.getContent().get(0).getSourceId());

Mockito.verify(myRequestPartitionHelperSvc, Mockito.atLeastOnce()).validateHasPartitionPermissions(any(), eq("Patient"), argThat(new PartitionIdMatcher(requestPartitionId)));
}
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -30,6 +30,25 @@ public class PartitionSettings {
private boolean myIncludePartitionInSearchHashes = false;
private boolean myUnnamedPartitionMode;
private Integer myDefaultPartitionId;
private boolean myAlwaysOpenNewTransactionForDifferentPartition;

/**
* Should we always open a new database transaction if the partition context changes
*
* @since 6.6.0
*/
public boolean isAlwaysOpenNewTransactionForDifferentPartition() {
return myAlwaysOpenNewTransactionForDifferentPartition;
}

/**
* Should we always open a new database transaction if the partition context changes
*
* @since 6.6.0
*/
public void setAlwaysOpenNewTransactionForDifferentPartition(boolean theAlwaysOpenNewTransactionForDifferentPartition) {
myAlwaysOpenNewTransactionForDifferentPartition = theAlwaysOpenNewTransactionForDifferentPartition;
}

/**
* If set to <code>true</code> (default is <code>false</code>) the <code>PARTITION_ID</code> value will be factored into the
@ -136,6 +155,12 @@ public class PartitionSettings {
myDefaultPartitionId = theDefaultPartitionId;
}

/**
* If enabled the JPA server will allow unqualified cross partition reference
*/
public boolean isAllowUnqualifiedCrossPartitionReference() {
return myAllowReferencesAcrossPartitions.equals(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
}

public enum CrossPartitionReferenceMode {

@ -145,18 +170,11 @@ public class PartitionSettings {
NOT_ALLOWED,

/**
* References can cross partition boundaries, in a way that hides the existence of partitions to the end user
* References can cross partition boundaries, with an assumption that boundaries
* will be managed by the database.
*/
ALLOWED_UNQUALIFIED
ALLOWED_UNQUALIFIED,

}

/**
* If enabled the JPA server will allow unqualified cross partition reference
*
*/
public boolean isAllowUnqualifiedCrossPartitionReference() {
return myAllowReferencesAcrossPartitions.equals(PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED);
}

}
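A minimal configuration sketch for the new setting (illustrative; where the PartitionSettings bean is defined depends on the deployment):

PartitionSettings partitionSettings = new PartitionSettings();
partitionSettings.setPartitioningEnabled(true); // assumption: partitioning is already in use
// Force a fresh database transaction whenever the partition context changes
partitionSettings.setAlwaysOpenNewTransactionForDifferentPartition(true);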
@ -20,9 +20,10 @@ package ca.uhn.fhir.jpa.model.cross;
* #L%
*/

import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;
import org.hl7.fhir.instance.model.api.IIdType;

public interface IBasePersistedResource extends IResourceLookup {
public interface IBasePersistedResource<T extends IResourcePersistentId<?>> extends IResourceLookup<T> {

IIdType getIdDt();

@ -24,7 +24,7 @@ import ca.uhn.fhir.rest.api.server.storage.IResourcePersistentId;

import java.util.Date;

public interface IResourceLookup<T extends IResourcePersistentId> {
public interface IResourceLookup<T extends IResourcePersistentId<?>> {
String getResourceType();

/**
@ -65,8 +65,12 @@ public class HapiSequenceStyleGenerator implements IdentifierGenerator, Persiste

@Override
public Serializable generate(SharedSessionContractImplementor theSession, Object theObject) throws HibernateException {
Long next = (Long) myGen.generate(theSession, theObject);
return myIdMassager.massage(myGeneratorName, next);
Long retVal = myIdMassager.generate(myGeneratorName);
if (retVal == null) {
Long next = (Long) myGen.generate(theSession, theObject);
retVal = myIdMassager.massage(myGeneratorName, next);
}
return retVal;
}

@Override
@ -20,6 +20,8 @@ package ca.uhn.fhir.jpa.model.dialect;
* #L%
*/

import javax.annotation.Nullable;

/**
* This is an internal API and may change or disappear without notice
*

@ -29,6 +31,17 @@ public interface ISequenceValueMassager {

Long massage(String theGeneratorName, Long theId);

/**
* If desired, the massager can supply an ID on its own. This method is tried first,
* and if it returns null, the normal hi-lo generator is called and then
* {@link #massage(String, Long)} is called on the output of that.
*
* @return Returns an ID or null
*/
@Nullable
default Long generate(String theGeneratorName) {
return null;
}


final class NoopSequenceValueMassager implements ISequenceValueMassager {
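To illustrate the contract documented above, a hedged sketch of a massager that supplies IDs itself and so short-circuits the hi-lo generator (the class name and ID source are assumptions; assumes java.util.concurrent.atomic.AtomicLong is imported):

public class ExternallySourcedMassager implements ISequenceValueMassager {

    private final AtomicLong myCounter = new AtomicLong(1_000_000L); // hypothetical ID source

    @Override
    @Nullable
    public Long generate(String theGeneratorName) {
        // Returning non-null makes HapiSequenceStyleGenerator skip the hi-lo generator entirely
        return myCounter.incrementAndGet();
    }

    @Override
    public Long massage(String theGeneratorName, Long theId) {
        // Only reached when generate() returns null, which it never does here
        return theId;
    }
}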
@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.model.entity;

import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.model.primitive.InstantDt;
import org.hibernate.annotations.OptimisticLock;

@ -38,8 +39,10 @@ import java.util.Date;
import static org.apache.commons.lang3.StringUtils.defaultString;

@MappedSuperclass
public abstract class BaseHasResource extends BasePartitionable implements IBaseResourceEntity, IBasePersistedResource {
public abstract class BaseHasResource extends BasePartitionable implements IBaseResourceEntity, IBasePersistedResource<JpaPid> {

public static final String RES_PUBLISHED = "RES_PUBLISHED";
public static final String RES_UPDATED = "RES_UPDATED";
@Column(name = "RES_DELETED_AT", nullable = true)
@Temporal(TemporalType.TIMESTAMP)
private Date myDeleted;

@ -54,12 +57,12 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase
private boolean myHasTags;

@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_PUBLISHED", nullable = false)
@Column(name = RES_PUBLISHED, nullable = false)
@OptimisticLock(excluded = true)
private Date myPublished;

@Temporal(TemporalType.TIMESTAMP)
@Column(name = "RES_UPDATED", nullable = false)
@Column(name = RES_UPDATED, nullable = false)
@OptimisticLock(excluded = true)
private Date myUpdated;
@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.model.entity;
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;

@ -118,4 +119,14 @@ public class PartitionablePartitionId implements Cloneable {
return RequestPartitionId.defaultPartition();
}
}

@Nonnull
public static PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId, @Nonnull PartitionSettings thePartitionSettings) {
Integer partitionId = theRequestPartitionId.getFirstPartitionIdOrNull();
if (partitionId == null) {
partitionId = thePartitionSettings.getDefaultPartitionId();
}
return new PartitionablePartitionId(partitionId, theRequestPartitionId.getPartitionDate());
}
}
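The new toStoragePartition helper collapses a (possibly multi-valued) RequestPartitionId into the single partition actually used for storage, falling back to the configured default when the request carries no partition ID. A hedged usage sketch (partition ID 42 and the default of 0 are invented values):

PartitionSettings settings = new PartitionSettings();
settings.setDefaultPartitionId(0);

RequestPartitionId requestPartition = RequestPartitionId.fromPartitionId(42);
PartitionablePartitionId storagePartition =
	PartitionablePartitionId.toStoragePartition(requestPartition, settings);
// storagePartition.getPartitionId() == 42; a request with no partition ID would map to 0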
@ -23,9 +23,6 @@ package ca.uhn.fhir.jpa.model.entity;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.parser.IParser;

/**
* @see ResourceHistoryTable#ENCODING_COL_LENGTH
*/
public enum ResourceEncodingEnum {

/*

@ -47,7 +44,13 @@ public enum ResourceEncodingEnum {
/**
* Resource was deleted - No contents expected
*/
DEL;
DEL,

/**
* Externally stored resource - Resource text is a reference to an external storage location,
* which will be stored in {@link ResourceHistoryTable#getResourceTextVc()}
*/
ESR;

public IParser newParser(FhirContext theContext) {
return theContext.newJsonParser();
@ -92,6 +92,7 @@ public class ResourceHistoryTable extends BaseHasResource implements Serializabl
@Column(name = "RES_TEXT_VC", length = RES_TEXT_VC_MAX_LENGTH, nullable = true)
@OptimisticLock(excluded = true)
private String myResourceTextVc;

@Column(name = "RES_ENCODING", nullable = false, length = ENCODING_COL_LENGTH)
@Enumerated(EnumType.STRING)
@OptimisticLock(excluded = true)
@ -98,6 +98,9 @@ public class ResourceLink extends BaseResourceIndex {
@Transient
private transient String myTargetResourceId;

/**
* Constructor
*/
public ResourceLink() {
super();
}
@ -78,16 +78,18 @@ import java.util.stream.Collectors;

@Indexed(routingBinder= @RoutingBinderRef(type = ResourceTableRoutingBinder.class))
@Entity
@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = {
@Table(name = ResourceTable.HFJ_RESOURCE, uniqueConstraints = {}, indexes = {
// Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE
@Index(name = "IDX_RES_DATE", columnList = "RES_UPDATED"),
@Index(name = "IDX_RES_DATE", columnList = BaseHasResource.RES_UPDATED),
@Index(name = "IDX_RES_TYPE_DEL_UPDATED", columnList = "RES_TYPE,RES_DELETED_AT,RES_UPDATED,PARTITION_ID,RES_ID"),
})
@NamedEntityGraph(name = "Resource.noJoins")
public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource, IResourceLookup {
public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource<JpaPid> {
public static final int RESTYPE_LEN = 40;
private static final int MAX_LANGUAGE_LENGTH = 20;
private static final long serialVersionUID = 1L;
public static final String HFJ_RESOURCE = "HFJ_RESOURCE";
public static final String RES_TYPE = "RES_TYPE";

/**
* Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB

@ -263,7 +265,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
@OptimisticLock(excluded = true)
private Collection<ResourceLink> myResourceLinksAsTarget;

@Column(name = "RES_TYPE", length = RESTYPE_LEN, nullable = false)
@Column(name = RES_TYPE, length = RESTYPE_LEN, nullable = false)
@FullTextField
@OptimisticLock(excluded = true)
private String myResourceType;

@ -769,7 +771,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas
}

@Override
public IResourcePersistentId getPersistentId() {
public JpaPid getPersistentId() {
return JpaPid.fromId(getId());
}
@ -42,6 +42,10 @@ import java.util.Set;

import static org.apache.commons.lang3.ObjectUtils.defaultIfNull;

/**
* This class contains configuration options common to all hapi-fhir-storage implementations.
* Ultimately it should live in that project
*/
public class StorageSettings {
/**
* @since 5.6.0
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -2,7 +2,7 @@ package ca.uhn.fhir.interceptor.model;

/*-
* #%L
* HAPI FHIR - Core Library
* HAPI FHIR Search Parameters
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%

@ -20,31 +20,52 @@ package ca.uhn.fhir.interceptor.model;
* #L%
*/

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.RestOperationTypeEnum;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
* This is a model class used as a parameter for the interceptor pointcut
* {@link ca.uhn.fhir.interceptor.api.Pointcut#STORAGE_PARTITION_IDENTIFY_READ}.
*/
public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails {

private final String myResourceType;
private final RestOperationTypeEnum myRestOperationType;
private final IIdType myReadResourceId;
private final Object mySearchParams;
@Nullable
private final SearchParameterMap mySearchParams;
@Nullable
private final IBaseResource myConditionalTargetOrNull;
@Nullable
private final String mySearchUuid;
@Nullable
private final String myExtendedOperationName;

public ReadPartitionIdRequestDetails(String theResourceType, RestOperationTypeEnum theRestOperationType, IIdType theReadResourceId, Object theSearchParams, @Nullable IBaseResource theConditionalTargetOrNull) {
private ReadPartitionIdRequestDetails(String theResourceType, RestOperationTypeEnum theRestOperationType, IIdType theReadResourceId, @Nullable SearchParameterMap theSearchParams, @Nullable IBaseResource theConditionalTargetOrNull, @Nullable String theSearchUuid, String theExtendedOperationName) {
myResourceType = theResourceType;
myRestOperationType = theRestOperationType;
myReadResourceId = theReadResourceId;
mySearchParams = theSearchParams;
myConditionalTargetOrNull = theConditionalTargetOrNull;
mySearchUuid = theSearchUuid;
myExtendedOperationName = theExtendedOperationName;
}

public static ReadPartitionIdRequestDetails forRead(String theResourceType, IIdType theId, boolean theIsVread) {
RestOperationTypeEnum op = theIsVread ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ;
return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null);
@Nullable
public String getExtendedOperationName() {
return myExtendedOperationName;
}

@Nullable
public String getSearchUuid() {
return mySearchUuid;
}

public String getResourceType() {

@ -59,18 +80,45 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails {
return myReadResourceId;
}

public Object getSearchParams() {
@Nullable
public SearchParameterMap getSearchParams() {
return mySearchParams;
}

@Nullable
public IBaseResource getConditionalTargetOrNull() {
return myConditionalTargetOrNull;
}

/**
* @param theId The resource ID (must include a resource type and ID)
*/
public static ReadPartitionIdRequestDetails forRead(IIdType theId) {
assert isNotBlank(theId.getResourceType());
assert isNotBlank(theId.getIdPart());
return forRead(theId.getResourceType(), theId, false);
}

public static ReadPartitionIdRequestDetails forOperation(@Nullable String theResourceType, @Nullable IIdType theId, @Nonnull String theExtendedOperationName) {
RestOperationTypeEnum op;
if (theId != null) {
op = RestOperationTypeEnum.EXTENDED_OPERATION_INSTANCE;
} else if (theResourceType != null) {
op = RestOperationTypeEnum.EXTENDED_OPERATION_TYPE;
} else {
op = RestOperationTypeEnum.EXTENDED_OPERATION_SERVER;
}

public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, Object theParams, IBaseResource theConditionalOperationTargetOrNull) {
return new ReadPartitionIdRequestDetails(theResourceType, RestOperationTypeEnum.SEARCH_TYPE, null, theParams, theConditionalOperationTargetOrNull);
return new ReadPartitionIdRequestDetails(theResourceType, op, null, null, null, null, theExtendedOperationName);
}

public static ReadPartitionIdRequestDetails forRead(String theResourceType, @Nonnull IIdType theId, boolean theIsVread) {
RestOperationTypeEnum op = theIsVread ? RestOperationTypeEnum.VREAD : RestOperationTypeEnum.READ;
return new ReadPartitionIdRequestDetails(theResourceType, op, theId.withResourceType(theResourceType), null, null, null, null);
}

public static ReadPartitionIdRequestDetails forSearchType(String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) {
return new ReadPartitionIdRequestDetails(theResourceType, RestOperationTypeEnum.SEARCH_TYPE, null, theParams, theConditionalOperationTargetOrNull, null, null);
}

public static ReadPartitionIdRequestDetails forHistory(String theResourceType, IIdType theIdType) {

@ -82,6 +130,10 @@ public class ReadPartitionIdRequestDetails extends PartitionIdRequestDetails {
} else {
restOperationTypeEnum = RestOperationTypeEnum.HISTORY_SYSTEM;
}
return new ReadPartitionIdRequestDetails(theResourceType, restOperationTypeEnum, theIdType, null, null);
return new ReadPartitionIdRequestDetails(theResourceType, restOperationTypeEnum, theIdType, null, null, null, null);
}

public static ReadPartitionIdRequestDetails forSearchUuid(String theUuid) {
return new ReadPartitionIdRequestDetails(null, RestOperationTypeEnum.GET_PAGE, null, null, null, theUuid, null);
}
}
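With the constructor now private, callers build the details object exclusively through the static factories, and paging requests are represented for the first time via a search UUID alone. A hedged sketch of three entry points (the IDs and parameter values are invented; IdType here stands in for any IIdType implementation):

// Read of a specific resource; the ID must carry a resource type
ReadPartitionIdRequestDetails readDetails =
	ReadPartitionIdRequestDetails.forRead(new IdType("Patient/123"));

// Type-level search, now strongly typed to SearchParameterMap instead of Object
SearchParameterMap params = new SearchParameterMap();
ReadPartitionIdRequestDetails searchDetails =
	ReadPartitionIdRequestDetails.forSearchType("Patient", params, null);

// New in this change: a GET_PAGE request identified only by its search UUID
ReadPartitionIdRequestDetails pageDetails =
	ReadPartitionIdRequestDetails.forSearchUuid("11111111-2222-3333-4444-555555555555");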
@ -28,8 +28,8 @@ import javax.annotation.Nonnull;
import java.util.List;

/**
* This interface is used by the {@link IResourceChangeListenerCacheRefresher} to read resources matching the provided
* search parameter map in the repository and compare them to caches stored in the {@link IResourceChangeListenerRegistry}.
* This interface is used by the {@literal IResourceChangeListenerCacheRefresher} to read resources matching the provided
* search parameter map in the repository and compare them to caches stored in the {@literal IResourceChangeListenerRegistry}.
*/
public interface IResourceVersionSvc {
@Nonnull
@ -20,7 +20,7 @@ package ca.uhn.fhir.jpa.cache;
* #L%
*/

import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.model.primitive.IdDt;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@ -44,7 +44,7 @@ public class ResourceVersionMap {
private final Map<IIdType, Long> myMap = new HashMap<>();
private ResourceVersionMap() {}

public static ResourceVersionMap fromResourceTableEntities(List<ResourceTable> theEntities) {
public static ResourceVersionMap fromResourceTableEntities(List<? extends IBasePersistedResource> theEntities) {
ResourceVersionMap retval = new ResourceVersionMap();
theEntities.forEach(entity -> retval.add(entity.getIdDt()));
return retval;
@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.partition;

/*-
* #%L
* HAPI FHIR Storage api
* HAPI FHIR Search Parameters
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%

@ -22,7 +22,6 @@ package ca.uhn.fhir.jpa.partition;

import ca.uhn.fhir.interceptor.model.ReadPartitionIdRequestDetails;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;

@ -35,18 +34,18 @@ import java.util.Set;
public interface IRequestPartitionHelperSvc {

@Nonnull
RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, String theResourceType, ReadPartitionIdRequestDetails theDetails);
RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails);

@Nonnull
default RequestPartitionId determineReadPartitionForRequestForRead(RequestDetails theRequest, String theResourceType, IIdType theId) {
default RequestPartitionId determineReadPartitionForRequestForRead(RequestDetails theRequest, String theResourceType, @Nonnull IIdType theId) {
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forRead(theResourceType, theId, theId.hasVersionIdPart());
return determineReadPartitionForRequest(theRequest, theResourceType, details);
return determineReadPartitionForRequest(theRequest, details);
}

@Nonnull
default RequestPartitionId determineReadPartitionForRequestForSearchType(RequestDetails theRequest, String theResourceType, SearchParameterMap theParams, IBaseResource theConditionalOperationTargetOrNull) {
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forSearchType(theResourceType, theParams, theConditionalOperationTargetOrNull);
return determineReadPartitionForRequest(theRequest, theResourceType, details);
return determineReadPartitionForRequest(theRequest, details);
}

RequestPartitionId determineGenericPartitionForRequest(RequestDetails theRequestDetails);

@ -54,18 +53,16 @@ public interface IRequestPartitionHelperSvc {
@Nonnull
default RequestPartitionId determineReadPartitionForRequestForHistory(RequestDetails theRequest, String theResourceType, IIdType theIdType) {
ReadPartitionIdRequestDetails details = ReadPartitionIdRequestDetails.forHistory(theResourceType, theIdType);
return determineReadPartitionForRequest(theRequest, theResourceType, details);
return determineReadPartitionForRequest(theRequest, details);
}

@Nonnull
default void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId){}
default void validateHasPartitionPermissions(RequestDetails theRequest, String theResourceType, RequestPartitionId theRequestPartitionId) {
}

@Nonnull
RequestPartitionId determineCreatePartitionForRequest(@Nullable RequestDetails theRequest, @Nonnull IBaseResource theResource, @Nonnull String theResourceType);

@Nonnull
PartitionablePartitionId toStoragePartition(@Nonnull RequestPartitionId theRequestPartitionId);

@Nonnull
Set<Integer> toReadPartitions(@Nonnull RequestPartitionId theRequestPartitionId);
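The central signature change here is that the resource type no longer travels as a separate argument: determineReadPartitionForRequest(theRequest, theResourceType, theDetails) becomes determineReadPartitionForRequest(theRequest, theDetails), with the type carried inside ReadPartitionIdRequestDetails. A hedged implementation fragment reacting to the details object (the tenant name and routing rule are invented for illustration):

@Nonnull
@Override
public RequestPartitionId determineReadPartitionForRequest(@Nullable RequestDetails theRequest, ReadPartitionIdRequestDetails theDetails) {
	// The resource type now comes from the details object
	if ("Patient".equals(theDetails.getResourceType())) {
		return RequestPartitionId.fromPartitionName("TENANT-A");
	}
	// Paging requests identify themselves only by their search UUID
	if (theDetails.getSearchUuid() != null) {
		return RequestPartitionId.allPartitions();
	}
	return RequestPartitionId.defaultPartition();
}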
@ -121,8 +121,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
private BaseRuntimeChildDefinition myRangeHighValueChild;
private BaseRuntimeChildDefinition myAddressLineValueChild;
private BaseRuntimeChildDefinition myAddressCityValueChild;
private BaseRuntimeChildDefinition myAddressStateValueChild;
private BaseRuntimeChildDefinition myAddressDistrictValueChild;
private BaseRuntimeChildDefinition myAddressStateValueChild;
private BaseRuntimeChildDefinition myAddressCountryValueChild;
private BaseRuntimeChildDefinition myAddressPostalCodeValueChild;
private BaseRuntimeChildDefinition myCapabilityStatementRestSecurityServiceValueChild;
@ -0,0 +1,78 @@
package ca.uhn.fhir.jpa.searchparam.extractor;

/*-
* #%L
* HAPI FHIR Search Parameters
* %%
* Copyright (C) 2014 - 2023 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;

import javax.annotation.Nonnull;

public class CrossPartitionReferenceDetails {

@Nonnull
private final RequestPartitionId mySourceResourcePartitionId;
@Nonnull
private final PathAndRef myPathAndRef;
@Nonnull
private final RequestDetails myRequestDetails;
@Nonnull
private final TransactionDetails myTransactionDetails;
@Nonnull
private final String mySourceResourceName;

/**
* Constructor
*/
public CrossPartitionReferenceDetails(@Nonnull RequestPartitionId theSourceResourcePartitionId, @Nonnull String theSourceResourceName, @Nonnull PathAndRef thePathAndRef, @Nonnull RequestDetails theRequestDetails, @Nonnull TransactionDetails theTransactionDetails) {
mySourceResourcePartitionId = theSourceResourcePartitionId;
mySourceResourceName = theSourceResourceName;
myPathAndRef = thePathAndRef;
myRequestDetails = theRequestDetails;
myTransactionDetails = theTransactionDetails;
}

@Nonnull
public String getSourceResourceName() {
return mySourceResourceName;
}

@Nonnull
public RequestDetails getRequestDetails() {
return myRequestDetails;
}

@Nonnull
public TransactionDetails getTransactionDetails() {
return myTransactionDetails;
}

@Nonnull
public RequestPartitionId getSourceResourcePartitionId() {
return mySourceResourcePartitionId;
}

@Nonnull
public PathAndRef getPathAndRef() {
return myPathAndRef;
}

}
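CrossPartitionReferenceDetails is the payload handed to hooks on the new cross-partition reference pointcut used by the extractor change below (Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE). A minimal hook sketch, assuming the hook returns the resolved target as an IResourceLookup<JpaPid>; the directory collaborator is invented for illustration:

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.dao.JpaPid;

@Interceptor
public class CrossPartitionReferenceInterceptor {

	private final MyReferenceDirectory myDirectory; // hypothetical collaborator, not a HAPI type

	public CrossPartitionReferenceInterceptor(MyReferenceDirectory theDirectory) {
		myDirectory = theDirectory;
	}

	@Hook(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE)
	public IResourceLookup<JpaPid> resolve(CrossPartitionReferenceDetails theDetails) {
		String sourceType = theDetails.getSourceResourceName();
		String targetReference = theDetails.getPathAndRef().getRef().getReferenceElement().getValue();
		// Resolve the target wherever it lives; the source partition is available for routing
		return myDirectory.lookup(sourceType, targetReference, theDetails.getSourceResourcePartitionId());
	}
}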
@ -31,8 +31,8 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.entity.BasePartitionable;
import ca.uhn.fhir.jpa.model.dao.JpaPid;
import ca.uhn.fhir.jpa.model.entity.BasePartitionable;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.IResourceIndexComboSearchParameter;
import ca.uhn.fhir.jpa.model.entity.StorageSettings;

@ -50,6 +50,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.parser.DataFormatException;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;

@ -71,8 +72,8 @@ import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Optional;
import java.util.Set;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@ -93,6 +94,8 @@ public class SearchParamExtractorService {
private PartitionSettings myPartitionSettings;
@Autowired(required = false)
private IResourceLinkResolver myResourceLinkResolver;
@Autowired
private IRequestPartitionHelperSvc myPartitionHelperSvc;

@VisibleForTesting
public void setSearchParamExtractor(ISearchParamExtractor theSearchParamExtractor) {

@ -436,7 +439,7 @@ public class SearchParamExtractorService {
* one we already have.
*/
Optional<ResourceLink> optionalResourceLink = findMatchingResourceLink(thePathAndRef, theExistingParams.getResourceLinks());
if(optionalResourceLink.isPresent()){
if (optionalResourceLink.isPresent()) {
resourceLink = optionalResourceLink.get();
} else {
resourceLink = resolveTargetAndCreateResourceLinkOrReturnNull(theRequestPartitionId, theSourceResourceName, thePathAndRef, theEntity, transactionDate, nextId, theRequest, theTransactionDetails);

@ -480,7 +483,7 @@ public class SearchParamExtractorService {

boolean hasMatchingResourceVersion = myContext.getParserOptions().isStripVersionsFromReferences() || referenceElement.getVersionIdPartAsLong() == null || referenceElement.getVersionIdPartAsLong().equals(resourceLink.getTargetResourceVersion());

if( hasMatchingSearchParamPath && hasMatchingResourceType && hasMatchingResourceId && hasMatchingResourceVersion) {
if (hasMatchingSearchParamPath && hasMatchingResourceType && hasMatchingResourceId && hasMatchingResourceVersion) {
return Optional.of(resourceLink);
}
}

@ -567,19 +570,33 @@ public class SearchParamExtractorService {
* target any more times than we have to.
*/

RequestPartitionId targetRequestPartitionId = theRequestPartitionId;
if (myPartitionSettings.isPartitioningEnabled() && myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) {
targetRequestPartitionId = RequestPartitionId.allPartitions();
IResourceLookup<JpaPid> targetResource;
if (myPartitionSettings.isPartitioningEnabled()) {
if (myPartitionSettings.getAllowReferencesAcrossPartitions() == PartitionSettings.CrossPartitionReferenceMode.ALLOWED_UNQUALIFIED) {

// Interceptor: Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, myInterceptorBroadcaster, theRequest)) {
CrossPartitionReferenceDetails referenceDetails = new CrossPartitionReferenceDetails(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
HookParams params = new HookParams(referenceDetails);
targetResource = (IResourceLookup<JpaPid>) CompositeInterceptorBroadcaster.doCallHooksAndReturnObject(myInterceptorBroadcaster, theRequest, Pointcut.JPA_RESOLVE_CROSS_PARTITION_REFERENCE, params);
} else {
targetResource = myResourceLinkResolver.findTargetResource(RequestPartitionId.allPartitions(), theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
}

} else {
targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
}
} else {
targetResource = myResourceLinkResolver.findTargetResource(theRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);
}

IResourceLookup targetResource = myResourceLinkResolver.findTargetResource(targetRequestPartitionId, theSourceResourceName, thePathAndRef, theRequest, theTransactionDetails);

if (targetResource == null) {
return null;
}

String targetResourceType = targetResource.getResourceType();
Long targetResourcePid = ((JpaPid) targetResource.getPersistentId()).getId();
Long targetResourcePid = targetResource.getPersistentId().getId();
String targetResourceIdPart = theNextId.getIdPart();
Long targetVersion = theNextId.getVersionIdPartAsLong();
return ResourceLink.forLocalReference(thePathAndRef.getPath(), theEntity, targetResourceType, targetResourcePid, targetResourceIdPart, theUpdateTime, targetVersion);

@ -597,8 +614,8 @@ public class SearchParamExtractorService {
for (IResourceIndexComboSearchParameter next : theParams) {
if (next.getResource() == null) {
next.setResource(theResourceTable);
if (next instanceof BasePartitionable){
((BasePartitionable)next).setPartitionId(theResourceTable.getPartitionId());
if (next instanceof BasePartitionable) {
((BasePartitionable) next).setPartitionId(theResourceTable.getPartitionId());
}
}
}

@ -671,14 +688,16 @@ public class SearchParamExtractorService {
}

// If extraction generated any warnings, broadcast an error
for (String next : theSearchParamSet.getWarnings()) {
StorageProcessingMessage messageHolder = new StorageProcessingMessage();
messageHolder.setMessage(next);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, messageHolder);
CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params);
if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING, theInterceptorBroadcaster, theRequestDetails)) {
for (String next : theSearchParamSet.getWarnings()) {
StorageProcessingMessage messageHolder = new StorageProcessingMessage();
messageHolder.setMessage(next);
HookParams params = new HookParams()
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(StorageProcessingMessage.class, messageHolder);
CompositeInterceptorBroadcaster.doCallHooks(theInterceptorBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params);
}
}
}
}
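The handleWarnings change above wraps the broadcast loop in a hasHooks pre-check, so the StorageProcessingMessage and HookParams objects are only allocated when a JPA_PERFTRACE_WARNING hook is actually registered. The same guard idiom shown in isolation (the surrounding method is illustrative; the broadcaster and pointcut types are the ones used in the diff):

void broadcastWarning(IInterceptorBroadcaster theBroadcaster, RequestDetails theRequestDetails, String theWarning) {
	// Cheap check first: skip all allocation when nothing is listening
	if (CompositeInterceptorBroadcaster.hasHooks(Pointcut.JPA_PERFTRACE_WARNING, theBroadcaster, theRequestDetails)) {
		StorageProcessingMessage message = new StorageProcessingMessage();
		message.setMessage(theWarning);
		HookParams params = new HookParams()
			.add(RequestDetails.class, theRequestDetails)
			.add(StorageProcessingMessage.class, message);
		CompositeInterceptorBroadcaster.doCallHooks(theBroadcaster, theRequestDetails, Pointcut.JPA_PERFTRACE_WARNING, params);
	}
}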
@ -36,9 +36,11 @@ public class SearchParamExtractorServiceTest {
searchParamSet.addWarning("help i'm a bug");
searchParamSet.addWarning("Spiff");

when(myJpaInterceptorBroadcaster.hasHooks(any())).thenReturn(true);
when(myJpaInterceptorBroadcaster.callHooks(any(), any())).thenReturn(true);

SearchParamExtractorService.handleWarnings(new ServletRequestDetails(myRequestInterceptorBroadcaster), myJpaInterceptorBroadcaster, searchParamSet);
ServletRequestDetails requestDetails = new ServletRequestDetails(myRequestInterceptorBroadcaster);
SearchParamExtractorService.handleWarnings(requestDetails, myJpaInterceptorBroadcaster, searchParamSet);

verify(myJpaInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any());
verify(myRequestInterceptorBroadcaster, times(2)).callHooks(eq(Pointcut.JPA_PERFTRACE_WARNING), any());
@ -5,7 +5,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -29,6 +29,7 @@ import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
import ca.uhn.fhir.jpa.api.svc.ISearchSvc;
import ca.uhn.fhir.jpa.dao.tx.HapiTransactionService;
import ca.uhn.fhir.jpa.model.sched.HapiJob;
import ca.uhn.fhir.jpa.model.sched.IHasScheduledJobs;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService;

@ -103,6 +104,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
private MatchUrlService myMatchUrlService;
@Autowired
private IResourceModifiedConsumer myResourceModifiedConsumer;
@Autowired
private HapiTransactionService myTransactionService;
private int myMaxSubmitPerPass = DEFAULT_MAX_SUBMIT;
private ExecutorService myExecutorService;

@ -150,8 +153,8 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc

SubscriptionTriggeringJobDetails jobDetails = new SubscriptionTriggeringJobDetails();
jobDetails.setJobId(UUID.randomUUID().toString());
jobDetails.setRemainingResourceIds(resourceIds.stream().map(t -> t.getValue()).collect(Collectors.toList()));
jobDetails.setRemainingSearchUrls(searchUrls.stream().map(t -> t.getValue()).collect(Collectors.toList()));
jobDetails.setRemainingResourceIds(resourceIds.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
jobDetails.setRemainingSearchUrls(searchUrls.stream().map(IPrimitiveType::getValue).collect(Collectors.toList()));
if (theSubscriptionId != null) {
jobDetails.setSubscriptionId(theSubscriptionId.getIdPart());
}

@ -329,12 +332,17 @@ public class SubscriptionTriggeringSvcImpl implements ISubscriptionTriggeringSvc
IFhirResourceDao<?> resourceDao = myDaoRegistry.getResourceDao(theJobDetails.getCurrentSearchResourceType());

int maxQuerySize = myMaxSubmitPerPass - totalSubmitted;
int toIndex = fromIndex + maxQuerySize;
int toIndex;
if (theJobDetails.getCurrentSearchCount() != null) {
toIndex = Math.min(toIndex, theJobDetails.getCurrentSearchCount());
toIndex = Math.min(fromIndex + maxQuerySize, theJobDetails.getCurrentSearchCount());
} else {
toIndex = fromIndex + maxQuerySize;
}

ourLog.info("Triggering job[{}] search {} requesting resources {} - {}", theJobDetails.getJobId(), theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex);
List<IResourcePersistentId> resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, null);
List<IResourcePersistentId<?>> resourceIds;
RequestPartitionId requestPartitionId = RequestPartitionId.allPartitions();
resourceIds = mySearchCoordinatorSvc.getResources(theJobDetails.getCurrentSearchUuid(), fromIndex, toIndex, null, requestPartitionId);

ourLog.info("Triggering job[{}] delivering {} resources", theJobDetails.getJobId(), resourceIds.size());
int highestIndexSubmitted = theJobDetails.getCurrentSearchLastUploadedIndex();
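The reworked index computation clamps the page end against the known result count up front instead of computing an out-of-range value and fixing it afterwards. Worked through with invented numbers: if fromIndex is 40, maxQuerySize is 50, and the search has reported a count of 60, then toIndex = min(40 + 50, 60) = 60; when no count has been reported yet, toIndex simply stays at fromIndex + maxQuerySize = 90.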
@ -7,6 +7,7 @@ import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.cache.IResourceVersionSvc;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamProvider;
import ca.uhn.fhir.jpa.subscription.channel.subscription.SubscriptionChannelFactory;

@ -76,6 +77,11 @@ public class DaoSubscriptionMatcherTest {
return mock(IResourceVersionSvc.class, RETURNS_DEEP_STUBS);
}

@Bean
public IRequestPartitionHelperSvc requestPartitionHelperSvc() {
return mock(IRequestPartitionHelperSvc.class);
}

}

}
@ -3,6 +3,7 @@ package ca.uhn.fhir.jpa.subscription.module;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.jpa.subscription.channel.api.IChannelFactory;

@ -25,6 +26,8 @@ import org.springframework.test.context.junit.jupiter.SpringExtension;

import java.util.Collections;

import static org.mockito.Mockito.mock;

@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {
SearchParamConfig.class,

@ -84,6 +87,11 @@ public abstract class BaseSubscriptionTest {
return new InterceptorService();
}

@Bean
public IRequestPartitionHelperSvc requestPartitionHelperSvc() {
return mock(IRequestPartitionHelperSvc.class);
}

@Bean
// Default implementation returns the name unchanged
public IChannelNamer channelNamer() {
@ -4,6 +4,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.JpaStorageSettings;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionCriteriaParser;
import ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchingSubscriber;

@ -20,14 +21,13 @@ import com.google.common.collect.Lists;
import org.hl7.fhir.dstu3.model.BooleanType;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Subscription;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;
import org.mockito.Answers;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.test.util.ReflectionTestUtils;

import java.util.Collections;

@ -46,7 +46,6 @@ import static org.mockito.Mockito.when;
* Tests copied from jpa.subscription.resthook.RestHookTestDstu3Test
*/
public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscribableChannelDstu3Test {
private static final Logger ourLog = LoggerFactory.getLogger(SubscriptionMatchingSubscriberTest.class);
private final IFhirResourceDao<Subscription> myMockSubscriptionDao = Mockito.mock(IFhirResourceDao.class);

@BeforeEach

@ -55,6 +54,11 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
myDaoRegistry.register(myMockSubscriptionDao);
}

@AfterEach
public void afterEach() {
myStorageSettings.setCrossPartitionSubscriptionEnabled(new JpaStorageSettings().isCrossPartitionSubscriptionEnabled());
}

@Test
public void testRestHookSubscriptionApplicationFhirJson() throws Exception {
String payload = "application/fhir+json";

@ -392,6 +396,13 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
ourObservationListener.awaitExpected();
}

private void mockSubscriptionRead(RequestPartitionId theRequestPartitionId, Subscription subscription) {
Subscription modifiedSubscription = subscription.copy();
// the original partition info was the request info, but we need the actual storage partition.
modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId);
when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription);
}

@Nested
public class TestDeleteMessages {
private final SubscriptionMatchingSubscriber subscriber = new SubscriptionMatchingSubscriber();

@ -460,7 +471,7 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
eq(Pointcut.SUBSCRIPTION_BEFORE_PERSISTED_RESOURCE_CHECKED), any(HookParams.class))).thenReturn(true);
when(message.getPayloadId(null)).thenReturn(new IdDt("Patient", 123L));
when(myNonDeleteCanonicalSubscription.getSendDeleteMessages()).thenReturn(false);
when(mySubscriptionRegistry.getAll()).thenReturn(List.of(myNonDeleteSubscription ,myActiveSubscription));
when(mySubscriptionRegistry.getAll()).thenReturn(List.of(myNonDeleteSubscription, myActiveSubscription));
when(myActiveSubscription.getSubscription()).thenReturn(myCanonicalSubscription);
when(myActiveSubscription.getCriteria()).thenReturn(mySubscriptionCriteria);
when(myActiveSubscription.getId()).thenReturn("Patient/123");

@ -496,12 +507,4 @@ public class SubscriptionMatchingSubscriberTest extends BaseBlockingQueueSubscri
verify(message, atLeastOnce()).getPayloadId(null);
}
}

private void mockSubscriptionRead(RequestPartitionId theRequestPartitionId, Subscription subscription) {
Subscription modifiedSubscription = subscription.copy();
// the original partition info was the request info, but we need the actual storage partition.
modifiedSubscription.setUserData(Constants.RESOURCE_PARTITION_ID, theRequestPartitionId);
when(myMockSubscriptionDao.read(eq(subscription.getIdElement()), any())).thenReturn(modifiedSubscription);
}
}
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -34,8 +34,10 @@ import static org.junit.jupiter.api.Assertions.fail;
public class FhirResourceDaoDstu2ValidateTest extends BaseJpaDstu2Test {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoDstu2ValidateTest.class);

@Override
@BeforeEach
public void before() {
public void before() throws Exception {
super.before();
myStorageSettings.setAllowExternalReferences(true);
}
@ -2792,7 +2792,7 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test {
* Upload structure definition
*/

String contents = IOUtils.toString(getClass().getResourceAsStream("/allergyintolerance-sd-david.json"), "UTF-8");
String contents = ClasspathUtil.loadResource("/allergyintolerance-sd-david.json");
HttpEntityEnclosingRequestBase post = new HttpPut(myServerBase + "/StructureDefinition/ohAllergyIntolerance");
post.setEntity(new StringEntity(contents, ContentType.create(Constants.CT_FHIR_JSON, "UTF-8")));
CloseableHttpResponse response = ourHttpClient.execute(post);

@ -2809,7 +2809,7 @@ public class ResourceProviderDstu2Test extends BaseResourceProviderDstu2Test {
* Validate
*/

contents = IOUtils.toString(getClass().getResourceAsStream("/allergyintolerance-david.json"), "UTF-8");
contents = ClasspathUtil.loadResource("/allergyintolerance-david.json");

post = new HttpPost(myServerBase + "/AllergyIntolerance/$validate?_pretty=true");
post.setEntity(new StringEntity(contents, ContentType.create(Constants.CT_FHIR_JSON, "UTF-8")));
@ -80,11 +80,12 @@ import static org.mockito.Mockito.when;

@SuppressWarnings({"unchecked"})
@ExtendWith(MockitoExtension.class)
public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
public class SearchCoordinatorSvcImplTest extends BaseSearchSvc {
private static final Logger ourLog = LoggerFactory.getLogger(SearchCoordinatorSvcImplTest.class);

@Mock private SearchStrategyFactory mySearchStrategyFactory;

@Spy
protected FhirContext myContext = FhirContext.forDstu2Cached();
@Mock
private SearchStrategyFactory mySearchStrategyFactory;
@Mock
private ISearchCacheSvc mySearchCacheSvc;
@Mock

@ -100,9 +101,6 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
private IRequestPartitionHelperSvc myPartitionHelperSvc;
@Mock
private ISynchronousSearchSvc mySynchronousSearchSvc;
@Spy
protected FhirContext myContext = FhirContext.forDstu2Cached();

@Spy
private ExceptionService myExceptionSvc = new ExceptionService(myContext);

@ -118,6 +116,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
@BeforeEach
public void before() {
HapiSystemProperties.enableUnitTestCaptureStack();
HapiSystemProperties.enableUnitTestMode();

myCurrentSearch = null;

@ -178,7 +177,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
assertEquals(allResults.size(), oldResults.size());
allResults.addAll(newResults);
return null;
}).when(mySearchResultCacheSvc).storeResults(any(), anyList(), anyList());
}).when(mySearchResultCacheSvc).storeResults(any(), anyList(), anyList(), any(), any());

SearchParameterMap params = new SearchParameterMap();
params.add("name", new StringParam("ANAME"));

@ -233,7 +232,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
});

try {
mySvc.getResources("1234-5678", 0, 100, null);
mySvc.getResources("1234-5678", 0, 100, null, null);
fail();
} catch (ResourceGoneException e) {
assertEquals("Search ID \"1234-5678\" does not exist and may have expired", e.getMessage());

@ -255,7 +254,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
});

try {
mySvc.getResources("1234-5678", 0, 100, null);
mySvc.getResources("1234-5678", 0, 100, null, null);
fail();
} catch (InternalErrorException e) {
assertThat(e.getMessage(), containsString("Request timed out"));

@ -295,15 +294,16 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
}

private void initAsyncSearches() {
when(myPersistedJpaBundleProviderFactory.newInstanceFirstPage(nullable(RequestDetails.class), nullable(Search.class), nullable(SearchTask.class), nullable(ISearchBuilder.class))).thenAnswer(t->{
when(myPersistedJpaBundleProviderFactory.newInstanceFirstPage(nullable(RequestDetails.class), nullable(Search.class), nullable(SearchTask.class), nullable(ISearchBuilder.class), nullable(RequestPartitionId.class))).thenAnswer(t -> {
RequestDetails requestDetails = t.getArgument(0, RequestDetails.class);
Search search = t.getArgument(1, Search.class);
SearchTask searchTask = t.getArgument(2, SearchTask.class);
ISearchBuilder<JpaPid> searchBuilder = t.getArgument(3, ISearchBuilder.class);
PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails);
PersistedJpaSearchFirstPageBundleProvider retVal = new PersistedJpaSearchFirstPageBundleProvider(search, searchTask, searchBuilder, requestDetails, null);
retVal.setStorageSettingsForUnitTest(new JpaStorageSettings());
retVal.setTxServiceForUnitTest(myTransactionService);
retVal.setSearchCoordinatorSvcForUnitTest(mySvc);
retVal.setRequestPartitionHelperSvcForUnitTest(myPartitionHelperSvc);
retVal.setContext(myContext);
return retVal;
});

@ -333,7 +333,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
try {
assertNotNull(searchId);
ourLog.info("About to pull the first resource");
List<JpaPid> resources = mySvc.getResources(searchId, 0, 1, null);
List<JpaPid> resources = mySvc.getResources(searchId, 0, 1, null, null);
ourLog.info("Done pulling the first resource");
assertEquals(1, resources.size());
} finally {

@ -342,14 +342,14 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
};
new Thread(taskStarter).start();

await().until(()->iter.getCountReturned() >= 3);
await().until(() -> iter.getCountReturned() >= 3);

ourLog.info("About to cancel all searches");
mySvc.cancelAllActiveSearches();
ourLog.info("Done cancelling all searches");

try {
mySvc.getResources(searchId, 0, 1, null);
mySvc.getResources(searchId, 0, 1, null, null);
} catch (ResourceGoneException e) {
// good
}

@ -373,9 +373,9 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
List<JpaPid> pids = createPidSequence(800);
IResultIterator iter = new SlowIterator(pids.iterator(), 2);
when(mySearchBuilder.createQuery(same(params), any(), any(), nullable(RequestPartitionId.class))).thenReturn(iter);
when(mySearchCacheSvc.save(any())).thenAnswer(t ->{
when(mySearchCacheSvc.save(any())).thenAnswer(t -> {
ourLog.info("Saving search");
return t.getArgument( 0, Search.class);
return t.getArgument(0, Search.class);
});
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());

@ -448,6 +448,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
search.setSearchType(SearchTypeEnum.SEARCH);
search.setResourceType("Patient");
search.setStatus(SearchStatusEnum.LOADING);
search.setSearchParameterMap(new SearchParameterMap());

when(mySearchCacheSvc.fetchByUuid(eq(uuid))).thenReturn(Optional.of(search));
doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());

@ -462,7 +463,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
// ignore
}

when(mySearchResultCacheSvc.fetchResultPids(any(Search.class), anyInt(), anyInt())).thenAnswer(theInvocation -> {
when(mySearchResultCacheSvc.fetchResultPids(any(Search.class), anyInt(), anyInt(), any(), any())).thenAnswer(theInvocation -> {
ArrayList<IResourcePersistentId> results = new ArrayList<>();
for (long i = theInvocation.getArgument(1, Integer.class); i < theInvocation.getArgument(2, Integer.class); i++) {
Long nextPid = i + 10L;

@ -492,6 +493,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
provider.setSearchBuilderFactoryForUnitTest(mySearchBuilderFactory);
provider.setSearchCoordinatorSvcForUnitTest(mySvc);
provider.setStorageSettingsForUnitTest(new JpaStorageSettings());
provider.setRequestPartitionId(RequestPartitionId.defaultPartition());
resources = provider.getResources(20, 40);
assertEquals(20, resources.size());
assertEquals("30", resources.get(0).getIdElement().getValueAsString());

@ -511,6 +513,7 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
provider.setDaoRegistryForUnitTest(myDaoRegistry);
provider.setSearchCoordinatorSvcForUnitTest(mySvc);
provider.setStorageSettingsForUnitTest(new JpaStorageSettings());
provider.setRequestPartitionId(RequestPartitionId.defaultPartition());
return provider;
}

@ -566,12 +569,12 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
search.setTotalCount(100);
when(mySearchCacheSvc.fetchByUuid(eq("0000-1111"))).thenReturn(Optional.of(search));

when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt())).thenReturn(null);
when(mySearchResultCacheSvc.fetchResultPids(any(), anyInt(), anyInt(), any(), any())).thenReturn(null);

try {
mySvc.getResources("0000-1111", 0, 10, null);
mySvc.getResources("0000-1111", 0, 10, null, null);
fail();
} catch (ResourceGoneException e) {
assertEquals("Search ID \"0000-1111\" does not exist and may have expired", e.getMessage());
}

@ -593,27 +596,70 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
search.setSearchParameterMap(new SearchParameterMap());
when(mySearchCacheSvc.fetchByUuid(eq("0000-1111"))).thenReturn(Optional.of(search));

when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t->{
when(mySearchCacheSvc.tryToMarkSearchAsInProgress(any())).thenAnswer(t -> {
search.setStatus(SearchStatusEnum.LOADING);
return Optional.of(search);
});
mockSearchTask();

when(mySearchResultCacheSvc.fetchAllResultPids(any())).thenReturn(null);
when(mySearchResultCacheSvc.fetchAllResultPids(any(), any(), any())).thenReturn(null);

try {
mySvc.getResources("0000-1111", 0, 10, null);
mySvc.getResources("0000-1111", 0, 10, null, null);
fail();
} catch (ResourceGoneException e) {
assertEquals("Search ID \"0000-1111\" does not exist and may have expired", e.getMessage());
}

}

private void mockSearchTask() {
IPagingProvider pagingProvider = mock(IPagingProvider.class);
lenient().when(pagingProvider.getMaximumPageSize())
.thenReturn(500);
when(myBeanFactory.getBean(anyString(), any(SearchTaskParameters.class)))
.thenAnswer(invocation -> {
String type = invocation.getArgument(0);
switch (type) {
case SearchConfig.SEARCH_TASK -> {
return new SearchTask(
invocation.getArgument(1),
myTransactionService,
ourCtx,
myInterceptorBroadcaster,
mySearchBuilderFactory,
mySearchResultCacheSvc,
myStorageSettings,
mySearchCacheSvc,
pagingProvider
);
}
case SearchConfig.CONTINUE_TASK -> {
return new SearchContinuationTask(
invocation.getArgument(1),
myTransactionService,
ourCtx,
myInterceptorBroadcaster,
mySearchBuilderFactory,
mySearchResultCacheSvc,
myStorageSettings,
mySearchCacheSvc,
pagingProvider,
myExceptionSvc
);
}
default -> {
fail("Invalid bean type: " + type);
return null;
}
}
});
}

public static class FailAfterNIterator extends BaseIterator<JpaPid> implements IResultIterator<JpaPid> {

private int myCount;
private final IResultIterator<JpaPid> myWrap;
private int myCount;

FailAfterNIterator(IResultIterator theWrap, int theCount) {
myWrap = theWrap;

@ -738,45 +784,4 @@ public class SearchCoordinatorSvcImplTest extends BaseSearchSvc{
// nothing
}
}

private void mockSearchTask() {
IPagingProvider pagingProvider = mock(IPagingProvider.class);
lenient().when(pagingProvider.getMaximumPageSize())
.thenReturn(500);
when(myBeanFactory.getBean(anyString(), any(SearchTaskParameters.class)))
.thenAnswer(invocation -> {
String type = invocation.getArgument(0);
switch (type) {
case SearchConfig.SEARCH_TASK:
return new SearchTask(
invocation.getArgument(1),
myTransactionService,
ourCtx,
myInterceptorBroadcaster,
mySearchBuilderFactory,
mySearchResultCacheSvc,
myStorageSettings,
mySearchCacheSvc,
pagingProvider
);
case SearchConfig.CONTINUE_TASK:
return new SearchContinuationTask(
invocation.getArgument(1),
myTransactionService,
ourCtx,
myInterceptorBroadcaster,
mySearchBuilderFactory,
mySearchResultCacheSvc,
myStorageSettings,
mySearchCacheSvc,
pagingProvider,
myExceptionSvc
);
default:
fail("Invalid bean type: " + type);
return null;
}
});
}
}
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -2553,17 +2553,16 @@ public class ResourceProviderDstu3Test extends BaseResourceProviderDstu3Test {
p.addName().setFamily("testMetaAddInvalid");
IIdType id = myClient.create().resource(p).execute().getId().toUnqualifiedVersionless();

//@formatter:off
String input = "<Parameters>\n" +
" <meta>\n" +
" <tag>\n" +
" <system value=\"http://example.org/codes/tags\"/>\n" +
" <code value=\"record-lost\"/>\n" +
" <display value=\"Patient File Lost\"/>\n" +
" </tag>\n" +
" </meta>\n" +
"</Parameters>";
//@formatter:on
String input = """
<Parameters>
<meta>
<tag>
<system value="http://example.org/codes/tags"/>
<code value="record-lost"/>
<display value="Patient File Lost"/>
</tag>
</meta>
</Parameters>""";

HttpPost post = new HttpPost(myServerBase + "/Patient/" + id.getIdPart() + "/$meta-add");
post.setEntity(new StringEntity(input, ContentType.create(Constants.CT_FHIR_XML, "UTF-8")));
@ -103,7 +103,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
myExtensionalCsId_v1 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
myCodeSystemDao.readEntity(myExtensionalCsId_v1, null).getId();
myCodeSystemDao.readEntity(myExtensionalCsId_v1, null);

theCodeSystem.setId("CodeSystem/cs2");
theCodeSystem.setVersion("2");

@ -116,7 +116,7 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
myExtensionalCsId_v2 = myCodeSystemDao.create(theCodeSystem, mySrd).getId().toUnqualifiedVersionless();
}
});
myCodeSystemDao.readEntity(myExtensionalCsId_v2, null).getId();
myCodeSystemDao.readEntity(myExtensionalCsId_v2, null);

}

@ -126,13 +126,13 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
valueSet.setId("ValueSet/vs1");
valueSet.getCompose().getInclude().get(0).setVersion("1");
myExtensionalVsId_v1 = persistSingleValueSet(valueSet, HttpVerb.POST);
myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getId();
myExtensionalVsIdOnResourceTable_v1 = myValueSetDao.readEntity(myExtensionalVsId_v1, null).getIdDt().getIdPartAsLong();

valueSet.setVersion("2");
valueSet.setId("ValueSet/vs2");
valueSet.getCompose().getInclude().get(0).setVersion("2");
myExtensionalVsId_v2 = persistSingleValueSet(valueSet, HttpVerb.POST);
myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getId();
myExtensionalVsIdOnResourceTable_v2 = myValueSetDao.readEntity(myExtensionalVsId_v2, null).getIdDt().getIdPartAsLong();

}
@ -0,0 +1,46 @@
<configuration>
<appender name="STDOUT" class="ch.qos.logback.core.ConsoleAppender">
<encoder>
<pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} [%thread] %-5level %logger{36} [%file:%line] %msg%n</pattern>
</encoder>
</appender>

<!-- define the root first, so the rest can inherit our logger -->
<root level="info">
<appender-ref ref="STDOUT" />
</root>

<logger name="ca.uhn.fhir.jpa.subscription.match.matcher.subscriber.SubscriptionMatchingSubscriber" level="info"/>
<logger name="org.springframework.web.socket.handler.ExceptionWebSocketHandlerDecorator" level="info"/>
<logger name="ca.uhn.fhir.jpa.dao.FhirResourceDaoSubscriptionDstu2" level="info"/>
<logger name="org.eclipse.jetty.websocket" level="info"/>
<logger name="org.hibernate.event.internal.DefaultPersistEventListener" level="info"/>
<logger name="org.eclipse" level="error"/>
<logger name="ca.uhn.fhir.rest.client" level="info"/>
<logger name="ca.uhn.fhir.jpa.dao" level="info"/>

<!-- set to debug to enable term expansion logs -->
<logger name="ca.uhn.fhir.jpa.term" level="info"/>
<!-- Set to 'trace' to enable SQL logging -->
<logger name="org.hibernate.SQL" level="info"/>
<!-- Set to 'trace' to enable SQL Value logging -->
<logger name="org.hibernate.type" level="info"/>

<logger name="org.springframework.test.context.cache" level="info"/>
<logger name="ca.uhn.fhir.jpa.bulk" level="info"/>

<!-- more debugging -->
<!--
<logger name="org.elasticsearch.client" level="trace"/>
<logger name="org.hibernate.search.elasticsearch.request" level="TRACE"/>
<logger name="ca.uhn.fhir.jpa.model.search" level="debug"/>
<logger name="org.elasticsearch.client" level="trace"/>
<logger name="org.hibernate.search" level="debug"/>
<logger name="org.hibernate.search.query" level="TRACE"/>
<logger name="org.hibernate.search.elasticsearch.request" level="TRACE"/>
-->
<!-- See https://docs.jboss.org/hibernate/stable/search/reference/en-US/html_single/#backend-lucene-io-writer-infostream for lucene logging
<logger name="org.hibernate.search.backend.lucene.infostream" level="TRACE"/> -->

</configuration>
@ -6,7 +6,7 @@
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>6.5.2-SNAPSHOT</version>
<version>6.5.3-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
@ -88,10 +88,14 @@ public class Batch2CoordinatorIT extends BaseJpaR4Test {
return RunOutcome.SUCCESS;
}

@Override
@BeforeEach
public void before() {
public void before() throws Exception {
super.before();

myCompletionHandler = details -> {};
myWorkChannel = (LinkedBlockingChannel) myChannelFactory.getOrCreateReceiver(CHANNEL_NAME, JobWorkNotificationJsonMessage.class, new ChannelConsumerSettings());
myStorageSettings.setJobFastTrackingEnabled(true);
}

@AfterEach
Some files were not shown because too many files have changed in this diff.