Finish docs

This commit is contained in:
jamesagnew 2020-04-20 11:02:27 -04:00
parent c412339a05
commit 7248b4e18e
26 changed files with 772 additions and 111 deletions

View File

@@ -21,6 +21,7 @@ package ca.uhn.fhir.interceptor.api;
  */
 import java.lang.annotation.ElementType;
+import java.lang.annotation.Repeatable;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;

View File

@@ -157,4 +157,4 @@ ca.uhn.fhir.jpa.partition.PartitionConfigSvcImpl.cantCreateDuplicatePartitionNam
 ca.uhn.fhir.jpa.partition.PartitionConfigSvcImpl.cantDeleteDefaultPartition=Can not delete default partition
 ca.uhn.fhir.jpa.partition.PartitionConfigSvcImpl.cantRenameDefaultPartition=Can not rename default partition
-ca.uhn.fhir.jpa.partition.RequestTenantPartitionInterceptor.unknownTenantName=Unknown tenant: {0}
+ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor.unknownTenantName=Unknown tenant: {0}

View File

@ -0,0 +1,133 @@
package ca.uhn.hapi.fhir.docs;
/*-
* #%L
* HAPI FHIR - Docs
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.PartitionId;
import ca.uhn.fhir.jpa.model.config.PartitionConfig;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.rest.server.tenant.UrlBaseTenantIdentificationStrategy;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.springframework.beans.factory.annotation.Autowired;
@SuppressWarnings("InnerClassMayBeStatic")
public class PartitionExamples {

   public void multitenantServer() {
   }

   // START SNIPPET: partitionInterceptorRequestPartition
   @Interceptor
   public class RequestTenantPartitionInterceptor {

      @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
      public PartitionId PartitionIdentifyCreate(ServletRequestDetails theRequestDetails) {
         return extractPartitionIdFromRequest(theRequestDetails);
      }

      @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
      public PartitionId PartitionIdentifyRead(ServletRequestDetails theRequestDetails) {
         return extractPartitionIdFromRequest(theRequestDetails);
      }

      private PartitionId extractPartitionIdFromRequest(ServletRequestDetails theRequestDetails) {
         // We will use the tenant ID that came from the request as the partition name
         String tenantId = theRequestDetails.getTenantId();
         return PartitionId.forPartitionName(tenantId);
      }

   }
   // END SNIPPET: partitionInterceptorRequestPartition

   // START SNIPPET: partitionInterceptorHeaders
   @Interceptor
   public class CustomHeaderBasedPartitionInterceptor {

      @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
      public PartitionId PartitionIdentifyCreate(ServletRequestDetails theRequestDetails) {
         String partitionName = theRequestDetails.getHeader("X-Partition-Name");
         return PartitionId.forPartitionName(partitionName);
      }

      @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
      public PartitionId PartitionIdentifyRead(ServletRequestDetails theRequestDetails) {
         String partitionName = theRequestDetails.getHeader("X-Partition-Name");
         return PartitionId.forPartitionName(partitionName);
      }

   }
   // END SNIPPET: partitionInterceptorHeaders

   // START SNIPPET: partitionInterceptorResourceContents
   @Interceptor
   public class ResourceTypePartitionInterceptor {

      @Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
      public PartitionId PartitionIdentifyCreate(IBaseResource theResource) {
         if (theResource instanceof Patient) {
            return PartitionId.forPartitionName("PATIENT");
         } else if (theResource instanceof Observation) {
            return PartitionId.forPartitionName("OBSERVATION");
         } else {
            return PartitionId.forPartitionName("OTHER");
         }
      }

   }
   // END SNIPPET: partitionInterceptorResourceContents

   // START SNIPPET: multitenantServer
   public class MultitenantServer extends RestfulServer {

      @Autowired
      private PartitionConfig myPartitionConfig;

      @Override
      protected void initialize() {
         // Enable partitioning
         myPartitionConfig.setPartitioningEnabled(true);

         // Set the tenant identification strategy
         setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy());

         // Use the tenant ID supplied by the tenant identification strategy
         // to serve as the partitioning ID
         registerInterceptor(new RequestTenantPartitionInterceptor());

         // ....Register some providers and other things....
      }

   }
   // END SNIPPET: multitenantServer

}

(Image file changed; diff not shown. Size before and after: 23 KiB)

(Image file changed; diff not shown. Size before and after: 29 KiB)

(New image file added; diff not shown. Size: 61 KiB)

View File

@@ -24,6 +24,16 @@ This interceptor will then produce output similar to the following:
 2014-09-04 03:30:00.443 Source[127.0.0.1] Operation[search-type Organization] UA[Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)] Params[]
 ```
+<a name="request-tenant-partition-interceptor"/>
+# Partitioning: Multitenant Request Partition
+If the JPA server has [partitioning](/docs/server_jpa/partitioning.html) enabled, the RequestTenantPartitionInterceptor can be used in combination with a [Tenant Identification Strategy](/docs/server_plain/multitenancy.html) in order to achieve a multitenant solution. See [JPA Server Partitioning](/docs/server_jpa/partitioning.html) for more information on partitioning.
+* [RequestTenantPartitionInterceptor JavaDoc](/apidocs/hapi-fhir-server/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.html)
+* [RequestTenantPartitionInterceptor Source](https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-server/src/main/java/ca/uhn/fhir/rest/server/interceptor/partition/RequestTenantPartitionInterceptor.java)
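For illustration only, a minimal wiring sketch is shown below. The class name `PartitionAwareServer` is invented for this sketch; the calls mirror the multitenant server example in the JPA partitioning documentation and assume that partitioning has already been enabled on the JPA side.

```java
// Illustrative sketch: "PartitionAwareServer" is not a HAPI FHIR class.
// It shows how the RequestTenantPartitionInterceptor is combined with a
// tenant identification strategy on a plain RestfulServer.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import ca.uhn.fhir.rest.server.tenant.UrlBaseTenantIdentificationStrategy;

public class PartitionAwareServer extends RestfulServer {

   public PartitionAwareServer() {
      super(FhirContext.forR4());
   }

   @Override
   protected void initialize() {
      // Pull the tenant ID from the first path segment of the request URL,
      // e.g. http://example.com/fhir/TENANT-A/Patient
      setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy());

      // Use that tenant ID as the partition name for storage operations
      registerInterceptor(new RequestTenantPartitionInterceptor());

      // ...register resource providers here...
   }
}
```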
 # Response Customizing: Syntax Highlighting
 The ResponseHighlighterInterceptor detects when a request is coming from a browser and returns HTML with syntax highlighted XML/JSON instead of just the raw text. In other words, if a user uses a browser to request `http://foo/Patient/1` by typing this address into their URL bar, they will get a nicely formatted HTML back with a human readable version of the content. This is particularly helpful for testers and public/development APIs where users are likely to invoke the API directly to see how it works.
@@ -150,8 +160,8 @@ Some security audit tools require that servers return an HTTP 405 if an unsuppor
 When using Subscriptions, the debug log interceptor can be used to add a number of additional lines to the server logs showing the internals of the subscription processing pipeline.
-* [SubscriptionDebugLogInterceptor JavaDoc](/apidocs/hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.html)
-* [SubscriptionDebugLogInterceptor Source](https://github.com/jamesagnew/hapi-fhir/blob/master//hapi-fhir-jpaserver-base/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.java)
+* [SubscriptionDebugLogInterceptor JavaDoc](/apidocs/hapi-fhir-jpaserver-subscription/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.html)
+* [SubscriptionDebugLogInterceptor Source](https://github.com/jamesagnew/hapi-fhir/blob/master/hapi-fhir-jpaserver-subscription/src/main/java/ca/uhn/fhir/jpa/subscription/util/SubscriptionDebugLogInterceptor.java)
 # Request Pre-Processing: Override Meta.source

View File

@@ -21,13 +21,13 @@ Partitioning involves the use of two dedicated columns to many tables within the
 * **PARTITION_ID** &ndash; This is an integer indicating the specific partition that a given resource is placed in. This column can also be *NULL*, meaning that the given resource is in the **Default Partition**.
 * **PARTITION_DATE** &ndash; This is a date/time column that can be assigned an arbitrary value depending on your use case. Typically, this would be used for use cases where data should be automatically dropped after a certain time period using native database partition drops.
-When partitioning is used, these two columns will be populated with the same value on all resource-specific tables (this includes [HFJ_RESOURCE](./schema.html#HFJ_RESOURCE) and all tables that have a foreign key relationship to it including [HFJ_RES_VER](./schema.html#HFJ_RES_VER), [HFJ_RESLINK](./schema.html#HFJ_RES_LINK), [HFJ_SPIDX_*](./schema.html#indexes), etc.)
-At the time that a resource is being **created**, an [interceptor hook](#partition-iInterceptors) is invoked in order to request the partition ID and date, and these will be written to the resource.
+When partitioning is used, these two columns will be populated with the same value on all resource-specific tables (this includes [HFJ_RESOURCE](./schema.html#HFJ_RESOURCE) and all tables that have a foreign key relationship to it including [HFJ_RES_VER](./schema.html#HFJ_RES_VER), [HFJ_RESLINK](./schema.html#HFJ_RES_LINK), [HFJ_SPIDX_*](./schema.html#search-indexes), etc.)
+At the time that a resource is being **created**, an [interceptor hook](#partition-interceptors) is invoked in order to request the partition ID and date, and these will be written to the resource.
 At the time that a resource is being **updated**, the partition ID and date from the previous version will be used.
-When a **read operation** is being performed (e.g. a read, search, history, etc.), a separate [interceptor hook](#partition-iInterceptors) is invoked in order to determine whether the operation should target a specific partition. The outcome of this hook determines how the partitioning manifests itself to the end user:
+When a **read operation** is being performed (e.g. a read, search, history, etc.), a separate [interceptor hook](#partition-interceptors) is invoked in order to determine whether the operation should target a specific partition. The outcome of this hook determines how the partitioning manifests itself to the end user:
 * If all read operations are scoped by the interceptor to only apply to a single partition, then the partitioning behaves as a **multitenant** solution.
 * If read operations are scoped to all partitions, then the partitioning is simply partitioning the data into logical segments.
@@ -40,7 +40,9 @@ The [PartitionConfig](/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/c
 The following settings can be enabled:
-* Include Partition in Search Hashes:
+* **Include Partition in Search Hashes** ([JavaDoc](/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionConfig.html#setIncludePartitionInSearchHashes(boolean))): If this feature is enabled, partition IDs will be factored into [Search Hashes](./schema.html#search-hashes). When this flag is not set (the default) and a search requests a specific partition, an additional SQL WHERE predicate is added to the query to explicitly request the given partition ID. When this flag is set, this additional WHERE predicate is not necessary since the partition is factored into the hash value being searched on. Setting this flag avoids the need to manually adjust indexes against the HFJ_SPIDX tables. Note that this flag should **not be used in environments where partitioning is being used for security purposes**, since it is possible for a user to reverse engineer false hash collisions.
+* **Cross-Partition Reference Mode** ([JavaDoc](/apidocs/hapi-fhir-jpaserver-model/ca/uhn/fhir/jpa/model/config/PartitionConfig.html#setAllowReferencesAcrossPartitions(ca.uhn.fhir.jpa.model.config.PartitionConfig.CrossPartitionReferenceMode))): This setting controls whether resources in one partition should be allowed to create references to resources in other partitions.
 # Partition Interceptors
@@ -57,14 +59,54 @@ The criteria for determining the partition will depend on your use case. For exa
 * If you are implementing multi-tenancy the partition might be determined by using the [Request Tenant ID](/docs/server_plain/multitenancy.html). It could also be determined by looking at request headers, or the authorized user/session context, etc.
 * If you are implementing segmented data partitioning, the partition might be determined by examining the actual resource being created, by the identity of the sending system, etc.
 ## Identify Partition for Read (Optional)
 A hook against the [`Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE`](/apidocs/hapi-fhir-base/ca/uhn/fhir/interceptor/api/Pointcut.html#STORAGE_PARTITION_IDENTIFY_CREATE) pointcut must be registered, and this hook method will be invoked every time a resource is being created in order to determine the partition to create the resource in.
-## Example: Using Request Tenants
+## Example: Partitioning based on Tenant ID
+The [RequestTenantPartitionInterceptor](/docs/interceptors/built_in_server_interceptors.html#request-tenant-partition-interceptor) uses the request tenant ID to determine the partition name. A simplified version of its source is shown below:
+```java
+{{snippet:classpath:/ca/uhn/hapi/fhir/docs/PartitionExamples.java|partitionInterceptorRequestPartition}}
+```
+## Example: Partitioning based on Headers
+If requests are coming from a trusted system, that system might be relied on to determine the partition for reads and writes.
+The following example shows a simple partition interceptor that determines the partition name by looking at a custom HTTP header:
+```java
+{{snippet:classpath:/ca/uhn/hapi/fhir/docs/PartitionExamples.java|partitionInterceptorHeaders}}
+```
+## Example: Using Resource Contents
+When creating resources, the contents of the resource can also be factored into the decision on which partition to use. The following example shows a very simple algorithm, placing resources into one of three partitions based on the resource type. Other contents in the resource could also be used instead.
+```java
+{{snippet:classpath:/ca/uhn/hapi/fhir/docs/PartitionExamples.java|partitionInterceptorResourceContents}}
+```
+# Complete Example: Using Request Tenants
+In order to achieve a multitenant configuration, the following configuration steps must be taken:
+* Partitioning must be enabled.
+* A [Tenant Identification Strategy](/docs/server_plain/multitenancy.html) must be enabled on the RestfulServer.
+* A [RequestTenantPartitionInterceptor](/docs/interceptors/built_in_server_interceptors.html#request-tenant-partition-interceptor) instance must be registered as an interceptor.
+Additionally, indexes will likely need to be tuned in order to support the partition-aware queries.
+The following snippet shows a server with this configuration.
+```java
+{{snippet:classpath:/ca/uhn/hapi/fhir/docs/PartitionExamples.java|multitenantServer}}
+```
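As a usage illustration (the base URL and the tenant name `TENANT-A` below are hypothetical), a client selects the partition simply by including the tenant ID in the base URL when the UrlBaseTenantIdentificationStrategy is used:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Patient;

public class MultitenantClientExample {

   public static void main(String[] args) {
      // Hypothetical server address; "TENANT-A" is the tenant ID, which the
      // RequestTenantPartitionInterceptor turns into the partition name.
      FhirContext ctx = FhirContext.forR4();
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8000/TENANT-A");

      // This resource is created in the partition named "TENANT-A"
      Patient patient = new Patient();
      patient.addName().setFamily("Simpson").addGiven("Homer");
      client.create().resource(patient).execute();
   }
}
```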

View File

@@ -71,7 +71,7 @@ The HFJ_RESOURCE table indicates a single resource of any type in the database.
 <td></td>
 <td>
 This column contains the FHIR version associated with this resource, using a constant drawn
-from <a href="/apidocs/hapi-fhir-base/ca/uhn/fhir/context/FhirVersionEnum.html">FhirVersionEnum</a>.
+from <a href="/hapi-fhir/apidocs/hapi-fhir-base/ca/uhn/fhir/context/FhirVersionEnum.html">FhirVersionEnum</a>.
 Not to be confused with <b>RES_VER</b> above.
 </td>
 </tr>
@@ -407,9 +407,119 @@ When a resource is created or updated, it is indexed for searching. Any search p
 </tbody>
 </table>
-<a name="indexes"/>
+<a name="search-indexes"/>
 # Background: Search Indexes
-The HFJ_SPIDX (Search Parameter Index) tables are used to index resources for searching. When a resource is created or updated, a set of rows in these tables will be added. These are used for finding appropriate rows to return when performing FHIR searches.
+The HFJ_SPIDX (Search Parameter Index) tables are used to index resources for searching. When a resource is created or updated, a set of rows in these tables will be added. These are used for finding appropriate rows to return when performing FHIR searches. There are dedicated tables for supporting each of the non-reference [FHIR Search Datatypes](http://hl7.org/fhir/search.html): Date, Number, Quantity, String, Token, and URI. Note that Reference search parameters are implemented using the [HFJ_RES_LINK](#HFJ_RES_LINK) table above.
<a name="search-hashes"/>
## Search Hashes
The SPIDX tables leverage "hash columns", which contain a hash of multiple columns in order to reduce index size and improve search performance. Hashes currently use the [MurmurHash3_x64_128](https://en.wikipedia.org/wiki/MurmurHash) hash algorithm, keeping only the first 64 bits in order to produce a LongInt value.
For example, all search index tables have columns for storing the search parameter name (**SP_NAME**) and resource type (**RES_TYPE**). An additional column which hashes these two values is provided, called **HASH_IDENTITY**.
In some configurations, the partition ID is also factored into the hashes.
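The exact seed, delimiter, and input ordering used by HAPI FHIR's hash calculation (see `BaseResourceIndexedSearchParam.calculateHashIdentity`) are internal details; the sketch below only illustrates the general idea of truncating a 128-bit MurmurHash3 value to a 64-bit long, here using Guava's implementation:

```java
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

public class HashIdentitySketch {

   // Illustration only: HAPI's real calculateHashIdentity() may use a different
   // seed, delimiter, input ordering, and (optionally) the partition ID.
   static long hashIdentity(String theResourceType, String theParamName) {
      HashFunction murmur = Hashing.murmur3_128();
      String input = theResourceType + "|" + theParamName;
      // asLong() keeps the first 64 bits of the 128-bit hash, producing a value
      // that fits in a column such as HASH_IDENTITY
      return murmur.hashString(input, StandardCharsets.UTF_8).asLong();
   }

   public static void main(String[] args) {
      System.out.println(hashIdentity("Patient", "birthdate"));
   }
}
```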
## Tables
<img src="/hapi-fhir/docs/images/jpa_erd_search_indexes.svg" alt="Search Indexes" style="width: 100%; max-width: 900px;"/>
## Columns
The following columns are common to **all HFJ_SPIDX_xxx tables**.
<table class="table table-striped table-condensed">
<thead>
<tr>
<th>Name</th>
<th>Relationships</th>
<th>Datatype</th>
<th>Nullable</th>
<th>Description</th>
</tr>
</thead>
<tbody>
<tr>
<td>PARTITION_ID</td>
<td></td>
<td>Integer</td>
<td>Nullable</td>
<td>
This is the optional partition ID, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
Note that the partition indicated by the <b>PARTITION_ID</b> and <b>PARTITION_DATE</b> columns refers to the partition
of the <i>SOURCE</i> resource, and not necessarily the <i>TARGET</i>.
</td>
</tr>
<tr>
<td>PARTITION_DATE</td>
<td></td>
<td>Timestamp</td>
<td>Nullable</td>
<td>
This is the optional partition date, if the resource is in a partition. See <a href="./partitioning.html">Partitioning</a>.
Note that the partition indicated by the <b>PARTITION_ID</b> and <b>PARTITION_DATE</b> columns refers to the partition
of the <i>SOURCE</i> resource, and not necessarily the <i>TARGET</i>.
</td>
</tr>
<tr>
<td>SP_ID</td>
<td></td>
<td>Long</td>
<td></td>
<td>
Holds the persistent ID
</td>
</tr>
<tr>
<td>RES_ID</td>
<td>FK to <a href="#HFJ_RESOURCE">HFJ_RESOURCE</a></td>
<td>String</td>
<td></td>
<td>
Contains the PID of the resource being indexed.
</td>
</tr>
<tr>
<td>SP_NAME</td>
<td></td>
<td>String</td>
<td></td>
<td>
This is the name of the search parameter being indexed.
</td>
</tr>
<tr>
<td>RES_TYPE</td>
<td></td>
<td>String</td>
<td></td>
<td>
This is the name of the resource being indexed.
</td>
</tr>
<tr>
<td>SP_UPDATED</td>
<td></td>
<td>Timestamp</td>
<td></td>
<td>
This is the time that this row was last updated.
</td>
</tr>
<tr>
<td>SP_MISSING</td>
<td></td>
<td>boolean</td>
<td></td>
<td>
If this row represents a search parameter that is **not** populated at all in the resource being indexed,
this will be populated with the value `true`. Otherwise it will be populated with `false`.
</td>
</tr>
</tbody>
</table>

View File

@ -22,7 +22,7 @@ import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.graphql.JpaStorageServices; import ca.uhn.fhir.jpa.graphql.JpaStorageServices;
import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices; import ca.uhn.fhir.jpa.interceptor.JpaConsentContextServices;
import ca.uhn.fhir.jpa.model.sched.ISchedulerService; import ca.uhn.fhir.jpa.model.sched.ISchedulerService;
import ca.uhn.fhir.jpa.partition.RequestTenantPartitionInterceptor; import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider; import ca.uhn.fhir.jpa.provider.SubscriptionTriggeringProvider;
import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider; import ca.uhn.fhir.jpa.provider.TerminologyUploaderProvider;
import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory; import ca.uhn.fhir.jpa.sched.AutowiringSpringBeanJobFactory;
@ -42,8 +42,6 @@ import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl; import ca.uhn.fhir.jpa.search.reindex.ResourceReindexingSvcImpl;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig; import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver; import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices; import ca.uhn.fhir.rest.server.interceptor.consent.IConsentContextServices;
import org.hibernate.jpa.HibernatePersistenceProvider; import org.hibernate.jpa.HibernatePersistenceProvider;

View File

@ -24,6 +24,7 @@ import ca.uhn.fhir.interceptor.model.PartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao; import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionConfig;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId; import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperService; import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperService;
@ -283,6 +284,9 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@Autowired @Autowired
private IRequestPartitionHelperService myRequestPartitionHelperService; private IRequestPartitionHelperService myRequestPartitionHelperService;
@Autowired
private PartitionConfig myPartitionConfig;
@Transactional() @Transactional()
@Override @Override
public List<Suggestion> suggestKeywords(String theContext, String theSearchParam, String theText, RequestDetails theRequest) { public List<Suggestion> suggestKeywords(String theContext, String theSearchParam, String theText, RequestDetails theRequest) {
@ -297,8 +301,10 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
throw new InvalidRequestException("Invalid context: " + theContext); throw new InvalidRequestException("Invalid context: " + theContext);
} }
// FIXME: this method should require a resource type // Partitioning is not supported for this operation
PartitionId partitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequest, null); Validate.isTrue(myPartitionConfig.isPartitioningEnabled() == false, "Suggest keywords not supported for partitioned system");
PartitionId partitionId = null;
ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(partitionId, contextParts[0], contextParts[1]); ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(partitionId, contextParts[0], contextParts[1]);
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
@ -315,7 +321,6 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
.sentence(theText.toLowerCase()).createQuery(); .sentence(theText.toLowerCase()).createQuery();
Query query = qb.bool() Query query = qb.bool()
// .must(qb.keyword().onField("myResourceLinks.myTargetResourcePid").matching(pid).createQuery())
.must(qb.keyword().onField("myResourceLinksField").matching(pid.toString()).createQuery()) .must(qb.keyword().onField("myResourceLinksField").matching(pid.toString()).createQuery())
.must(textQuery) .must(textQuery)
.createQuery(); .createQuery();

View File

@@ -360,7 +360,11 @@ public class SearchBuilder implements ISearchBuilder {
 		}
 		myQueryRoot.addPredicate(myCriteriaBuilder.isNull(myQueryRoot.get("myDeleted")));
 		if (myPartitionId != null) {
-			myQueryRoot.addPredicate(myCriteriaBuilder.equal(myQueryRoot.get("myPartitionIdValue"), myPartitionId.getPartitionId()));
+			if (myPartitionId.getPartitionId() != null) {
+				myQueryRoot.addPredicate(myCriteriaBuilder.equal(myQueryRoot.get("myPartitionIdValue").as(Integer.class), myPartitionId.getPartitionId()));
+			} else {
+				myQueryRoot.addPredicate(myCriteriaBuilder.isNull(myQueryRoot.get("myPartitionIdValue").as(Integer.class)));
+			}
 		}
 	}

View File

@ -64,11 +64,11 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
private DaoRegistry myDaoRegistry; private DaoRegistry myDaoRegistry;
@Override @Override
public IResourceLookup findTargetResource(PartitionId thePartitionId, RuntimeSearchParam theSearchParam, String theSourcePath, IIdType theSourceResourceId, String theTypeString, Class<? extends IBaseResource> theType, IBaseReference theReference, RequestDetails theRequest) { public IResourceLookup findTargetResource(PartitionId thePartitionId, RuntimeSearchParam theSearchParam, String theSourcePath, IIdType theSourceResourceId, String theResourceType, Class<? extends IBaseResource> theType, IBaseReference theReference, RequestDetails theRequest) {
IResourceLookup resolvedResource; IResourceLookup resolvedResource;
String idPart = theSourceResourceId.getIdPart(); String idPart = theSourceResourceId.getIdPart();
try { try {
resolvedResource = myIdHelperService.resolveResourceIdentity(thePartitionId, theTypeString, idPart, theRequest); resolvedResource = myIdHelperService.resolveResourceIdentity(thePartitionId, theResourceType, idPart, theRequest);
ourLog.trace("Translated {}/{} to resource PID {}", theType, idPart, resolvedResource); ourLog.trace("Translated {}/{} to resource PID {}", theType, idPart, resolvedResource);
} catch (ResourceNotFoundException e) { } catch (ResourceNotFoundException e) {
@ -89,8 +89,8 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
} }
ourLog.trace("Resolved resource of type {} as PID: {}", resolvedResource.getResourceType(), resolvedResource.getResourceId()); ourLog.trace("Resolved resource of type {} as PID: {}", resolvedResource.getResourceType(), resolvedResource.getResourceId());
if (!theTypeString.equals(resolvedResource.getResourceType())) { if (!theResourceType.equals(resolvedResource.getResourceType())) {
ourLog.error("Resource with PID {} was of type {} and wanted {}", resolvedResource.getResourceId(), theTypeString, resolvedResource.getResourceType()); ourLog.error("Resource with PID {} was of type {} and wanted {}", resolvedResource.getResourceId(), theResourceType, resolvedResource.getResourceType());
throw new UnprocessableEntityException("Resource contains reference to unknown resource ID " + theSourceResourceId.getValue()); throw new UnprocessableEntityException("Resource contains reference to unknown resource ID " + theSourceResourceId.getValue());
} }
@ -99,7 +99,7 @@ public class DaoResourceLinkResolver implements IResourceLinkResolver {
throw new InvalidRequestException("Resource " + resName + "/" + idPart + " is deleted, specified in path: " + theSourcePath); throw new InvalidRequestException("Resource " + resName + "/" + idPart + " is deleted, specified in path: " + theSourcePath);
} }
if (!theSearchParam.hasTargets() && theSearchParam.getTargets().contains(theTypeString)) { if (!theSearchParam.hasTargets() && theSearchParam.getTargets().contains(theResourceType)) {
return null; return null;
} }

View File

@ -21,9 +21,7 @@ package ca.uhn.fhir.jpa.dao.index;
*/ */
import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster; import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.PartitionId; import ca.uhn.fhir.interceptor.model.PartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao; import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
@ -32,13 +30,10 @@ import ca.uhn.fhir.jpa.model.cross.IResourceLookup;
import ca.uhn.fhir.jpa.model.cross.ResourceLookup; import ca.uhn.fhir.jpa.model.cross.ResourceLookup;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId; import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ForcedId; import ca.uhn.fhir.jpa.model.entity.ForcedId;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.api.server.RequestDetails; import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException; import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import com.github.benmanes.caffeine.cache.Cache; import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Caffeine; import com.github.benmanes.caffeine.cache.Caffeine;
import com.google.common.collect.ListMultimap; import com.google.common.collect.ListMultimap;
@ -68,6 +63,7 @@ import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
/** /**
* This class is used to convert between PIDs (the internal primary key for a particular resource as * This class is used to convert between PIDs (the internal primary key for a particular resource as
@ -122,9 +118,9 @@ public class IdHelperService {
* @throws ResourceNotFoundException If the ID can not be found * @throws ResourceNotFoundException If the ID can not be found
*/ */
@Nonnull @Nonnull
public IResourceLookup resolveResourceIdentity(PartitionId thePartitionId, String theResourceName, String theResourceId, RequestDetails theRequestDetails) throws ResourceNotFoundException { public IResourceLookup resolveResourceIdentity(PartitionId thePartitionId, String theResourceType, String theResourceId, RequestDetails theRequestDetails) throws ResourceNotFoundException {
// We only pass 1 input in so only 0..1 will come back // We only pass 1 input in so only 0..1 will come back
IdDt id = new IdDt(theResourceName, theResourceId); IdDt id = new IdDt(theResourceType, theResourceId);
Collection<IResourceLookup> matches = translateForcedIdToPids(thePartitionId, theRequestDetails, Collections.singletonList(id)); Collection<IResourceLookup> matches = translateForcedIdToPids(thePartitionId, theRequestDetails, Collections.singletonList(id));
assert matches.size() <= 1; assert matches.size() <= 1;
if (matches.isEmpty()) { if (matches.isEmpty()) {
@ -344,22 +340,16 @@ public class IdHelperService {
if (nextIds.size() > 0) { if (nextIds.size() > 0) {
Collection<Object[]> views; Collection<Object[]> views;
if (isBlank(nextResourceType)) { assert isNotBlank(nextResourceType);
warnAboutUnqualifiedForcedIdResolution(theRequest);
// FIXME: deal with partition here if (thePartitionId != null) {
views = myForcedIdDao.findAndResolveByForcedIdWithNoType(nextIds); if (thePartitionId.getPartitionId() != null) {
views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartition(nextResourceType, nextIds, thePartitionId.getPartitionId());
} else {
if (thePartitionId != null) {
if (thePartitionId.getPartitionId() != null) {
views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartition(nextResourceType, nextIds, thePartitionId.getPartitionId());
} else {
views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartitionNull(nextResourceType, nextIds);
}
} else { } else {
views = myForcedIdDao.findAndResolveByForcedIdWithNoType(nextResourceType, nextIds); views = myForcedIdDao.findAndResolveByForcedIdWithNoTypeInPartitionNull(nextResourceType, nextIds);
} }
} else {
views = myForcedIdDao.findAndResolveByForcedIdWithNoType(nextResourceType, nextIds);
} }
for (Object[] next : views) { for (Object[] next : views) {
@ -382,17 +372,6 @@ public class IdHelperService {
return retVal; return retVal;
} }
private void warnAboutUnqualifiedForcedIdResolution(RequestDetails theRequest) {
StorageProcessingMessage msg = new StorageProcessingMessage()
.setMessage("This search uses unqualified resource IDs (an ID without a resource type). This is less efficient than using a qualified type.");
ourLog.debug(msg.getMessage());
HookParams params = new HookParams()
.add(RequestDetails.class, theRequest)
.addIfMatchesType(ServletRequestDetails.class, theRequest)
.add(StorageProcessingMessage.class, msg);
JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_WARNING, params);
}
public void clearCache() { public void clearCache() {
myPersistentIdCache.invalidateAll(); myPersistentIdCache.invalidateAll();
myResourceLookupCache.invalidateAll(); myResourceLookupCache.invalidateAll();

View File

@ -27,7 +27,6 @@ import ca.uhn.fhir.jpa.dao.SearchBuilder;
import ca.uhn.fhir.jpa.model.config.PartitionConfig; import ca.uhn.fhir.jpa.model.config.PartitionConfig;
import ca.uhn.fhir.jpa.model.entity.BasePartitionable; import ca.uhn.fhir.jpa.model.entity.BasePartitionable;
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam; import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
import ca.uhn.fhir.jpa.model.entity.PartitionablePartitionId;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.entity.SearchParamPresent; import ca.uhn.fhir.jpa.model.entity.SearchParamPresent;
@ -146,16 +145,23 @@ abstract class BasePredicateBuilder {
} }
Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From<?, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate, PartitionId thePartitionId) { Predicate combineParamIndexPredicateWithParamNamePredicate(String theResourceName, String theParamName, From<?, ? extends BaseResourceIndexedSearchParam> theFrom, Predicate thePredicate, PartitionId thePartitionId) {
List<Predicate> andPredicates = new ArrayList<>();
addPartitionIdPredicate(thePartitionId, theFrom, andPredicates);
if (myDontUseHashesForSearch) { if (myDontUseHashesForSearch) {
Predicate resourceTypePredicate = myCriteriaBuilder.equal(theFrom.get("myResourceType"), theResourceName); Predicate resourceTypePredicate = myCriteriaBuilder.equal(theFrom.get("myResourceType"), theResourceName);
Predicate paramNamePredicate = myCriteriaBuilder.equal(theFrom.get("myParamName"), theParamName); Predicate paramNamePredicate = myCriteriaBuilder.equal(theFrom.get("myParamName"), theParamName);
Predicate outerPredicate = myCriteriaBuilder.and(resourceTypePredicate, paramNamePredicate, thePredicate); andPredicates.add(resourceTypePredicate);
return outerPredicate; andPredicates.add(paramNamePredicate);
andPredicates.add(thePredicate);
} else {
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(myPartitionConfig, thePartitionId, theResourceName, theParamName);
Predicate hashIdentityPredicate = myCriteriaBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
andPredicates.add(hashIdentityPredicate);
andPredicates.add(thePredicate);
} }
long hashIdentity = BaseResourceIndexedSearchParam.calculateHashIdentity(myPartitionConfig, thePartitionId, theResourceName, theParamName); return myCriteriaBuilder.and(toArray(andPredicates));
Predicate hashIdentityPredicate = myCriteriaBuilder.equal(theFrom.get("myHashIdentity"), hashIdentity);
return myCriteriaBuilder.and(hashIdentityPredicate, thePredicate);
} }
public PartitionConfig getPartitionConfig() { public PartitionConfig getPartitionConfig() {
@ -217,7 +223,7 @@ abstract class BasePredicateBuilder {
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, num, thePartitionId); return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, num, thePartitionId);
} }
void addPartitionIdPredicate(PartitionId thePartitionId, Join<ResourceTable, ? extends BasePartitionable> theJoin, List<Predicate> theCodePredicates) { void addPartitionIdPredicate(PartitionId thePartitionId, From<?, ? extends BasePartitionable> theJoin, List<Predicate> theCodePredicates) {
if (thePartitionId != null) { if (thePartitionId != null) {
Integer partitionId = thePartitionId.getPartitionId(); Integer partitionId = thePartitionId.getPartitionId();
Predicate partitionPredicate; Predicate partitionPredicate;

View File

@ -82,17 +82,13 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
} }
List<Predicate> codePredicates = new ArrayList<>(); List<Predicate> codePredicates = new ArrayList<>();
addPartitionIdPredicate(thePartitionId, join, codePredicates);
for (IQueryParameterType nextOr : theList) { for (IQueryParameterType nextOr : theList) {
IQueryParameterType params = nextOr; Predicate p = createPredicateDate(nextOr,
Predicate p = createPredicateDate(params,
theResourceName,
theParamName,
myCriteriaBuilder, myCriteriaBuilder,
join, join,
operation, operation
thePartitionId); );
codePredicates.add(p); codePredicates.add(p);
} }
@ -116,22 +112,17 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
From<?, ResourceIndexedSearchParamDate> theFrom, From<?, ResourceIndexedSearchParamDate> theFrom,
PartitionId thePartitionId) { PartitionId thePartitionId) {
Predicate predicateDate = createPredicateDate(theParam, Predicate predicateDate = createPredicateDate(theParam,
theResourceName,
theParamName,
theBuilder, theBuilder,
theFrom, theFrom,
null, null
thePartitionId); );
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, predicateDate, thePartitionId); return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theParamName, theFrom, predicateDate, thePartitionId);
} }
private Predicate createPredicateDate(IQueryParameterType theParam, private Predicate createPredicateDate(IQueryParameterType theParam,
String theResourceName,
String theParamName,
CriteriaBuilder theBuilder, CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamDate> theFrom, From<?, ResourceIndexedSearchParamDate> theFrom,
SearchFilterParser.CompareOperation theOperation, SearchFilterParser.CompareOperation theOperation) {
PartitionId thePartitionId) {
Predicate p; Predicate p;
if (theParam instanceof DateParam) { if (theParam instanceof DateParam) {
@ -159,15 +150,14 @@ public class PredicateBuilderDate extends BasePredicateBuilder implements IPredi
return p; return p;
} }
// FIXME: does this need a partition ID?
private Predicate createPredicateDateFromRange(CriteriaBuilder theBuilder, private Predicate createPredicateDateFromRange(CriteriaBuilder theBuilder,
From<?, ResourceIndexedSearchParamDate> theFrom, From<?, ResourceIndexedSearchParamDate> theFrom,
DateRangeParam theRange, DateRangeParam theRange,
SearchFilterParser.CompareOperation operation) { SearchFilterParser.CompareOperation operation) {
Date lowerBound = theRange.getLowerBoundAsInstant(); Date lowerBound = theRange.getLowerBoundAsInstant();
Date upperBound = theRange.getUpperBoundAsInstant(); Date upperBound = theRange.getUpperBoundAsInstant();
Predicate lt = null; Predicate lt;
Predicate gt = null; Predicate gt;
Predicate lb = null; Predicate lb = null;
Predicate ub = null; Predicate ub = null;

View File

@ -77,6 +77,7 @@ import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component; import org.springframework.stereotype.Component;
import javax.annotation.Nonnull; import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.persistence.criteria.From; import javax.persistence.criteria.From;
import javax.persistence.criteria.Join; import javax.persistence.criteria.Join;
import javax.persistence.criteria.JoinType; import javax.persistence.criteria.JoinType;
@ -193,6 +194,12 @@ class PredicateBuilderReference extends BasePredicateBuilder {
List<Predicate> codePredicates = new ArrayList<>(); List<Predicate> codePredicates = new ArrayList<>();
addPartitionIdPredicate(thePartitionId, join, codePredicates); addPartitionIdPredicate(thePartitionId, join, codePredicates);
for (IIdType next : targetIds) {
if (!next.hasResourceType()) {
warnAboutPerformanceOnUnqualifiedResources(theParamName, theRequest, null);
}
}
// Resources by ID // Resources by ID
List<ResourcePersistentId> targetPids = myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, targetIds, theRequest); List<ResourcePersistentId> targetPids = myIdHelperService.resolveResourcePersistentIdsWithCache(thePartitionId, targetIds, theRequest);
if (!targetPids.isEmpty()) { if (!targetPids.isEmpty()) {
@ -419,13 +426,20 @@ class PredicateBuilderReference extends BasePredicateBuilder {
return predicate; return predicate;
} }
private void warnAboutPerformanceOnUnqualifiedResources(String theParamName, RequestDetails theRequest, List<Class<? extends IBaseResource>> theCandidateTargetTypes) { private void warnAboutPerformanceOnUnqualifiedResources(String theParamName, RequestDetails theRequest, @Nullable List<Class<? extends IBaseResource>> theCandidateTargetTypes) {
String message = new StringBuilder() StringBuilder builder = new StringBuilder();
.append("This search uses an unqualified resource(a parameter in a chain without a resource type). ") builder.append("This search uses an unqualified resource(a parameter in a chain without a resource type). ");
.append("This is less efficient than using a qualified type. ") builder.append("This is less efficient than using a qualified type. ");
.append("[" + theParamName + "] resolves to [" + theCandidateTargetTypes.stream().map(Class::getSimpleName).collect(Collectors.joining(",")) + "].") if (theCandidateTargetTypes != null) {
.append("If you know what you're looking for, try qualifying it like this: ") builder.append("[" + theParamName + "] resolves to [" + theCandidateTargetTypes.stream().map(Class::getSimpleName).collect(Collectors.joining(",")) + "].");
.append(theCandidateTargetTypes.stream().map(cls -> "[" + cls.getSimpleName() + ":" + theParamName + "]").collect(Collectors.joining(" or "))) builder.append("If you know what you're looking for, try qualifying it using the form ");
builder.append(theCandidateTargetTypes.stream().map(cls -> "[" + cls.getSimpleName() + ":" + theParamName + "]").collect(Collectors.joining(" or ")));
} else {
builder.append("If you know what you're looking for, try qualifying it using the form: '");
builder.append(theParamName).append(":[resourceType]");
builder.append("'");
}
String message = builder
.toString(); .toString();
StorageProcessingMessage msg = new StorageProcessingMessage() StorageProcessingMessage msg = new StorageProcessingMessage()
.setMessage(message); .setMessage(message);

View File

@ -134,7 +134,6 @@ class PredicateBuilderTag extends BasePredicateBuilder {
continue; continue;
} }
// FIXME: add test for :missing
if (paramInverted) { if (paramInverted) {
ourLog.debug("Searching for _tag:not"); ourLog.debug("Searching for _tag:not");
@ -160,6 +159,11 @@ class PredicateBuilderTag extends BasePredicateBuilder {
defJoin.where(tagListPredicate); defJoin.where(tagListPredicate);
continue; continue;
} else {
myQueryRoot.setHasIndexJoins();
} }
Join<ResourceTable, ResourceTag> tagJoin = myQueryRoot.join("myTags", JoinType.LEFT); Join<ResourceTable, ResourceTag> tagJoin = myQueryRoot.join("myTags", JoinType.LEFT);
@ -172,7 +176,6 @@ class PredicateBuilderTag extends BasePredicateBuilder {
addPartitionIdPredicate(thePartitionId, tagJoin, predicates); addPartitionIdPredicate(thePartitionId, tagJoin, predicates);
} }
myQueryRoot.setHasIndexJoins();
myQueryRoot.addPredicates(predicates); myQueryRoot.addPredicates(predicates);
} }

View File

@@ -104,10 +104,6 @@ public class TestUtil {
 		Subselect subselect = theClazz.getAnnotation(Subselect.class);
 		boolean isView = (subselect != null);
-		// FIXME: remove?
-		Embeddable embeddable = theClazz.getAnnotation(Embeddable.class);
-		boolean isEmbeddable = (embeddable != null);
 		scan(theClazz, theNames, theIsSuperClass, isView);
 		for (Field nextField : theClazz.getDeclaredFields()) {

View File

@ -1,6 +1,9 @@
package ca.uhn.fhir.jpa.dao.r4; package ca.uhn.fhir.jpa.dao.r4;
import ca.uhn.fhir.context.RuntimeResourceDefinition; import ca.uhn.fhir.context.RuntimeResourceDefinition;
import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IAnonymousInterceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.entity.Search; import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.model.config.PartitionConfig; import ca.uhn.fhir.jpa.model.config.PartitionConfig;
@ -12,6 +15,7 @@ import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamUri;
import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService; import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap.EverythingModeEnum;
@ -46,6 +50,8 @@ import org.junit.AfterClass;
import org.junit.Before; import org.junit.Before;
import org.junit.Ignore; import org.junit.Ignore;
import org.junit.Test; import org.junit.Test;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionCallback;
@ -81,6 +87,8 @@ import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
@SuppressWarnings({"unchecked", "Duplicates"}) @SuppressWarnings({"unchecked", "Duplicates"})
public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test { public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
@ -3507,6 +3515,44 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
} }
} }
@Test
public void testSearchReferenceUntyped() {
Patient p = new Patient();
p.setActive(true);
p.setId("PAT");
myPatientDao.update(p);
AuditEvent audit = new AuditEvent();
audit.setId("AUDIT");
audit.addEntity().getWhat().setReference("Patient/PAT");
myAuditEventDao.update(audit);
IAnonymousInterceptor interceptor = mock(IAnonymousInterceptor.class);
try {
myInterceptorRegistry.registerAnonymousInterceptor(Pointcut.JPA_PERFTRACE_WARNING, interceptor);
myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(AuditEvent.SP_ENTITY, new ReferenceParam("PAT"));
IBundleProvider outcome = myAuditEventDao.search(map);
assertThat(toUnqualifiedVersionlessIdValues(outcome), contains("AuditEvent/AUDIT"));
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
} finally {
myInterceptorRegistry.unregisterInterceptor(interceptor);
}
ArgumentCaptor<HookParams> captor = ArgumentCaptor.forClass(HookParams.class);
verify(interceptor, times(1)).invoke(ArgumentMatchers.eq(Pointcut.JPA_PERFTRACE_WARNING), captor.capture());
StorageProcessingMessage message = captor.getValue().get(StorageProcessingMessage.class);
assertEquals("This search uses an unqualified resource(a parameter in a chain without a resource type). This is less efficient than using a qualified type. If you know what you're looking for, try qualifying it using the form: 'entity:[resourceType]'", message.getMessage());
}
@Test @Test
public void testSearchWithDateAndReusesExistingJoin() { public void testSearchWithDateAndReusesExistingJoin() {
// Add a search parameter to Observation.issued, so that between that one // Add a search parameter to Observation.issued, so that between that one

View File

@ -12,7 +12,10 @@ import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.server.IBundleProvider; import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.param.DateAndListParam;
import ca.uhn.fhir.rest.param.DateOrListParam;
import ca.uhn.fhir.rest.param.DateParam; import ca.uhn.fhir.rest.param.DateParam;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ReferenceParam; import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam; import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam; import ca.uhn.fhir.rest.param.TokenParam;
@ -1156,7 +1159,239 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID")); assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
} }
// FIXME: add DATE and DATE RANGE test @Test
public void testSearch_DateParam_SearchAllPartitions() {
myPartitionConfig.setIncludePartitionInSearchHashes(false);
IIdType patientIdNull = createPatient(null, withBirthdate("2020-04-20"));
IIdType patientId1 = createPatient(1, withBirthdate("2020-04-20"));
IIdType patientId2 = createPatient(2, withBirthdate("2020-04-20"));
createPatient(null, withBirthdate("2021-04-20"));
createPatient(1, withBirthdate("2021-04-20"));
createPatient(2, withBirthdate("2021-04-20"));
// Date param
addReadPartition(null);
myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateParam("2020-04-20"));
map.setLoadSynchronous(true);
IBundleProvider results = myPatientDao.search(map);
List<IIdType> ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull, patientId1, patientId2));
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// Date OR param
addReadPartition(null);
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateOrListParam().addOr(new DateParam("2020-04-20")).addOr(new DateParam("2020-04-22")));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull, patientId1, patientId2));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// Date AND param
addReadPartition(null);
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateAndListParam().addAnd(new DateOrListParam().addOr(new DateParam("2020"))).addAnd(new DateOrListParam().addOr(new DateParam("2020-04-20"))));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull, patientId1, patientId2));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// DateRangeParam
addReadPartition(null);
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateRangeParam(new DateParam("2020-01-01"), new DateParam("2020-04-25")));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull, patientId1, patientId2));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
}
@Test
public void testSearch_DateParam_SearchSpecificPartitions() {
myPartitionConfig.setIncludePartitionInSearchHashes(false);
IIdType patientIdNull = createPatient(null, withBirthdate("2020-04-20"));
IIdType patientId1 = createPatient(1, withBirthdate("2020-04-20"));
IIdType patientId2 = createPatient(2, withBirthdate("2020-04-20"));
createPatient(null, withBirthdate("2021-04-20"));
createPatient(1, withBirthdate("2021-04-20"));
createPatient(2, withBirthdate("2021-04-20"));
// Date param
addReadPartition(1);
myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateParam("2020-04-20"));
map.setLoadSynchronous(true);
IBundleProvider results = myPatientDao.search(map);
List<IIdType> ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientId1));
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// Date OR param
addReadPartition(1);
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateOrListParam().addOr(new DateParam("2020-04-20")).addOr(new DateParam("2020-04-22")));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientId1));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// Date AND param
addReadPartition(1);
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateAndListParam().addAnd(new DateOrListParam().addOr(new DateParam("2020"))).addAnd(new DateOrListParam().addOr(new DateParam("2020-04-20"))));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientId1));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// DateRangeParam
addReadPartition(1);
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateRangeParam(new DateParam("2020-01-01"), new DateParam("2020-04-25")));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientId1));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
}
@Test
public void testSearch_DateParam_SearchDefaultPartitions() {
myPartitionConfig.setIncludePartitionInSearchHashes(false);
IIdType patientIdNull = createPatient(null, withBirthdate("2020-04-20"));
IIdType patientId1 = createPatient(1, withBirthdate("2020-04-20"));
IIdType patientId2 = createPatient(2, withBirthdate("2020-04-20"));
createPatient(null, withBirthdate("2021-04-20"));
createPatient(1, withBirthdate("2021-04-20"));
createPatient(2, withBirthdate("2021-04-20"));
// Date param
addDefaultReadPartition();
myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateParam("2020-04-20"));
map.setLoadSynchronous(true);
IBundleProvider results = myPatientDao.search(map);
List<IIdType> ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull));
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// Date OR param
addDefaultReadPartition();
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateOrListParam().addOr(new DateParam("2020-04-20")).addOr(new DateParam("2020-04-22")));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// Date AND param
addDefaultReadPartition();
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateAndListParam().addAnd(new DateOrListParam().addOr(new DateParam("2020"))).addAnd(new DateOrListParam().addOr(new DateParam("2020-04-20"))));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
// DateRangeParam
addDefaultReadPartition();
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.add(Patient.SP_BIRTHDATE, new DateRangeParam(new DateParam("2020-01-01"), new DateParam("2020-04-25")));
map.setLoadSynchronous(true);
results = myPatientDao.search(map);
ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull));
searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(2, StringUtils.countMatches(searchSql, "SP_VALUE_LOW"));
}
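
The partition each of these searches runs against is steered by the addReadPartition(...) and addDefaultReadPartition() helpers, which are not part of this hunk. A minimal sketch of the kind of hook such a helper could register against the STORAGE_PARTITION_IDENTIFY_READ pointcut is shown below; the class and member names are illustrative assumptions (and the real helpers pass numeric partition IDs, whereas this sketch resolves partitions by name), not the test base class's actual implementation.

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.PartitionId;

// Hypothetical helper interceptor: answers the "which partition should this
// read/search target?" question with a fixed partition chosen by the test.
@Interceptor
public class ReadPartitionSelectorInterceptor {

	private final String myPartitionName;

	public ReadPartitionSelectorInterceptor(String thePartitionName) {
		myPartitionName = thePartitionName;
	}

	@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_READ)
	public PartitionId identifyPartitionForRead() {
		// Returning null here means "no specific partition", i.e. search across all partitions
		return myPartitionName != null ? PartitionId.forPartitionName(myPartitionName) : null;
	}
}
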
@Test
public void testSearch_StringParam_SearchAllPartitions() {
@@ -1298,6 +1533,80 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {
assertEquals(1, StringUtils.countMatches(searchSql, "SP_VALUE_NORMALIZED"));
}
@Test
public void testSearch_TagNotParam_SearchAllPartitions() {
IIdType patientIdNull = createPatient(null, withActiveTrue(), withTag("http://system", "code"));
IIdType patientId1 = createPatient(1, withActiveTrue(), withTag("http://system", "code"));
IIdType patientId2 = createPatient(2, withActiveTrue(), withTag("http://system", "code"));
createPatient(null, withActiveTrue(), withTag("http://system", "code2"));
createPatient(1, withActiveTrue(), withTag("http://system", "code2"));
createPatient(2, withActiveTrue(), withTag("http://system", "code2"));
addReadPartition(null);
myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Constants.PARAM_TAG, new TokenParam("http://system", "code2").setModifier(TokenParamModifier.NOT));
map.setLoadSynchronous(true);
IBundleProvider results = myPatientDao.search(map);
List<IIdType> ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientIdNull, patientId1, patientId2));
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(0, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "TAG_SYSTEM='http://system'"));
}
@Test
public void testSearch_TagNotParam_SearchDefaultPartition() {
IIdType patientIdNull = createPatient(null, withActiveTrue(), withTag("http://system", "code"));
createPatient(1, withActiveTrue(), withTag("http://system", "code"));
createPatient(2, withActiveTrue(), withTag("http://system", "code"));
addDefaultReadPartition();
myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Constants.PARAM_TAG, new TokenParam("http://system", "code2").setModifier(TokenParamModifier.NOT));
map.setLoadSynchronous(true);
IBundleProvider results = myPatientDao.search(map);
List<IIdType> ids = toUnqualifiedVersionlessIds(results);
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID is null"));
assertEquals(1, StringUtils.countMatches(searchSql, "TAG_SYSTEM='http://system'"));
assertThat(ids.toString(), ids, Matchers.contains(patientIdNull));
}
@Test
public void testSearch_TagNotParam_SearchOnePartition() {
createPatient(null, withActiveTrue(), withTag("http://system", "code"));
IIdType patientId1 = createPatient(1, withActiveTrue(), withTag("http://system", "code"));
createPatient(2, withActiveTrue(), withTag("http://system", "code"));
createPatient(null, withActiveTrue(), withTag("http://system", "code2"));
createPatient(1, withActiveTrue(), withTag("http://system", "code2"));
createPatient(2, withActiveTrue(), withTag("http://system", "code2"));
addReadPartition(1);
myCaptureQueriesListener.clear();
SearchParameterMap map = new SearchParameterMap();
map.add(Constants.PARAM_TAG, new TokenParam("http://system", "code2").setModifier(TokenParamModifier.NOT));
map.setLoadSynchronous(true);
IBundleProvider results = myPatientDao.search(map);
List<IIdType> ids = toUnqualifiedVersionlessIds(results);
assertThat(ids, Matchers.contains(patientId1));
String searchSql = myCaptureQueriesListener.getSelectQueriesForCurrentThread().get(0).getSql(true, true);
ourLog.info("Search SQL:\n{}", searchSql);
assertEquals(1, StringUtils.countMatches(searchSql, "PARTITION_ID"));
assertEquals(1, StringUtils.countMatches(searchSql, "TAG_SYSTEM='http://system'"));
}
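
The TokenParamModifier.NOT searches above are the Java-API form of the standard :not modifier on the _tag parameter. Over plain REST the same query would look roughly like the sketch below; it reuses the Apache HttpClient and commons-io types (HttpGet, CloseableHttpResponse, IOUtils) already imported elsewhere in this changeset, and ourHttpClient, ourServerBase and ourLog are assumed test fixtures rather than fields guaranteed to exist in this test class.

// Decoded query: GET [base]/Patient?_tag:not=http://system|code2
private void searchForPatientsWithoutTag() throws IOException {
	HttpGet get = new HttpGet(ourServerBase + "/Patient?_tag:not=http%3A%2F%2Fsystem%7Ccode2");
	try (CloseableHttpResponse response = ourHttpClient.execute(get)) {
		String body = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
		ourLog.info("Tag :not search response:\n{}", body);
	}
}
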
@Test
public void testSearch_TagParam_SearchAllPartitions() {
IIdType patientIdNull = createPatient(null, withActiveTrue(), withTag("http://system", "code"));

View File

@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.provider.r4;
import ca.uhn.fhir.jpa.model.config.PartitionConfig;
import ca.uhn.fhir.jpa.model.util.ProviderConstants;
import ca.uhn.fhir.jpa.partition.PartitionManagementProvider;
import ca.uhn.fhir.jpa.partition.RequestTenantPartitionInterceptor;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import ca.uhn.fhir.jpa.util.TestUtil;
import ca.uhn.fhir.rest.client.interceptor.CapturingInterceptor;
import ca.uhn.fhir.rest.client.interceptor.UrlTenantSelectionInterceptor;

View File

@@ -688,9 +688,8 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
" <system value=\"http://healthcare.example.org/identifiers/encounter\"/>\n" +
" <value value=\"845962.8975469\"/>\n" +
" </identifier>\n" +
// FIXME: restore
// " <status value=\"in-progress\"/>\n" +
// " <class value=\"inpatient\"/>\n" +
" <status value=\"in-progress\"/>\n" +
" <class value=\"inpatient\"/>\n" +
" <patient>\n" +
" <reference value=\"Patient?family=van%20de%20Heuvelcx85ioqWJbI&amp;given=Pietercx85ioqWJbI\"/>\n" +
" </patient>\n" +
@@ -710,9 +709,6 @@ public class SystemProviderR4Test extends BaseJpaR4Test {
HttpPost req = new HttpPost(ourServerBase);
req.setEntity(new StringEntity(input, ContentType.parse(Constants.CT_FHIR_XML + "; charset=utf-8")));
// FIXME: remove
myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
CloseableHttpResponse resp = ourHttpClient.execute(req);
try {
String encoded = IOUtils.toString(resp.getEntity().getContent(), StandardCharsets.UTF_8);

View File

@@ -201,6 +201,7 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
return b.build();
}
@SuppressWarnings("ConstantConditions")
@Override
public boolean matches(IQueryParameterType theParam) {
if (!(theParam instanceof DateParam)) {
@@ -216,7 +217,6 @@ public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchPar
return false;
}
// FIXME: below is always true
boolean result = true;
if (lowerBound != null) {
result &= (myValueLow.after(lowerBound) || myValueLow.equals(lowerBound));
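
The hunk above shows only the lower-bound half of the comparison that matches() builds up. As a rough illustration of the overall check, using plain java.util.Date stand-ins rather than the entity's actual myValueLow/myValueHigh fields, and assuming the upper-bound half (not shown in this hunk) mirrors the lower-bound half:

import java.util.Date;

public class DateRangeMatchSketch {

	// Illustrative only: an indexed range [low, high] satisfies the parameter when
	// low is on/after the parameter's lower bound and high is on/before its upper bound.
	public static boolean matches(Date theLow, Date theHigh, Date theLowerBound, Date theUpperBound) {
		boolean result = true;
		if (theLowerBound != null) {
			// Same as: theLow.after(theLowerBound) || theLow.equals(theLowerBound)
			result &= !theLow.before(theLowerBound);
		}
		if (theUpperBound != null) {
			result &= !theHigh.after(theUpperBound);
		}
		return result;
	}
}
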

View File

@@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.partition;
package ca.uhn.fhir.rest.server.interceptor.partition;
/*-
* #%L
* HAPI FHIR JPA Server
* HAPI FHIR - Server Framework
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
@@ -26,19 +26,28 @@ import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.interceptor.model.PartitionId;
import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import org.hl7.fhir.instance.model.api.IBaseResource;
import ca.uhn.fhir.rest.server.tenant.ITenantIdentificationStrategy;
import org.jetbrains.annotations.NotNull;
import javax.annotation.Nonnull;
import static org.apache.commons.lang3.StringUtils.isBlank;
/**
* This interceptor uses the request tenant ID (as supplied to the server using
* {@link ca.uhn.fhir.rest.server.RestfulServer#setTenantIdentificationStrategy(ITenantIdentificationStrategy)})
* to indicate the partition ID. With this interceptor registered, the server treats the tenant name
* supplied by the {@link ITenantIdentificationStrategy tenant identification strategy} as a partition name.
* <p>
* Partition names (aka tenant IDs) must be registered in advance using the partition management operations.
* </p>
*
* @since 5.0.0
*/
@Interceptor
public class RequestTenantPartitionInterceptor {
@Hook(Pointcut.STORAGE_PARTITION_IDENTIFY_CREATE)
public PartitionId PartitionIdentifyCreate(IBaseResource theResource, ServletRequestDetails theRequestDetails) {
public PartitionId PartitionIdentifyCreate(ServletRequestDetails theRequestDetails) {
return extractPartitionIdFromRequest(theRequestDetails);
}
@@ -47,7 +56,7 @@ public class RequestTenantPartitionInterceptor {
return extractPartitionIdFromRequest(theRequestDetails);
}
@NotNull
@Nonnull
private PartitionId extractPartitionIdFromRequest(ServletRequestDetails theRequestDetails) {
// We will use the tenant ID that came from the request as the partition name
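
Registering this interceptor together with a tenant identification strategy is what ties the URL tenant to a partition. A minimal server-side sketch, assuming the partitions named in request URLs were created beforehand through the partition management operations:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.interceptor.partition.RequestTenantPartitionInterceptor;
import ca.uhn.fhir.rest.server.tenant.UrlBaseTenantIdentificationStrategy;

public class MultitenantPartitionServerSketch {

	// The URL tenant (e.g. "TENANT-A" in http://example.com/base/TENANT-A/Patient/123)
	// is extracted by the strategy and treated as the partition name by the interceptor.
	public RestfulServer createServer(FhirContext theContext) {
		RestfulServer server = new RestfulServer(theContext);
		server.setTenantIdentificationStrategy(new UrlBaseTenantIdentificationStrategy());
		server.registerInterceptor(new RequestTenantPartitionInterceptor());
		return server;
	}
}
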

View File

@@ -33,7 +33,16 @@ import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
/**
* This class is a tenant identification strategy which assumes that a single path
* element will be present between the server base URL and the beginning
* element will be present between the server base URL and the individual request.
* <p>
* For example,
* with this strategy enabled, given the following URL on a server with base URL <code>http://example.com/base</code>,
* the server will extract the <code>TENANT-A</code> portion of the URL and use it as the tenant identifier. The
* request will then proceed to read the resource with ID <code>Patient/123</code>.
* </p>
* <p>
* GET http://example.com/base/TENANT-A/Patient/123
* </p>
*/
public class UrlBaseTenantIdentificationStrategy implements ITenantIdentificationStrategy {
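
On the client side, the same path element can be added automatically. A rough sketch using the UrlTenantSelectionInterceptor that the test changes above import; the no-argument constructor and setTenantId(...) setter are assumed here, so consult the interceptor's actual API if it differs:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.rest.client.interceptor.UrlTenantSelectionInterceptor;
import org.hl7.fhir.r4.model.Patient;

public class TenantUrlClientSketch {

	// Asks the client to insert the tenant name between the base URL and each
	// request path, producing URLs such as http://example.com/base/TENANT-A/Patient/123.
	public Patient readFromTenant(FhirContext theContext) {
		IGenericClient client = theContext.newRestfulGenericClient("http://example.com/base");
		UrlTenantSelectionInterceptor tenantSelection = new UrlTenantSelectionInterceptor();
		tenantSelection.setTenantId("TENANT-A"); // assumed setter
		client.registerInterceptor(tenantSelection);
		return client.read().resource(Patient.class).withId("123").execute();
	}
}
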