Merge branch 'master' of github.com:jamesagnew/hapi-fhir
commit 201c3a6187
@@ -159,6 +159,8 @@
 					<ignoredResource>javac.bat</ignoredResource>
 					<ignoredResource>about.html</ignoredResource>
 					<ignoredResource>changelog.xml</ignoredResource>
+					<ignoredResource>.*/favicon.ico$</ignoredResource>
+					<ignoredResource>Log4j-charsets.properties</ignoredResource>
 				</ignoredResourcePatterns>
 			</configuration>
 		</plugin>
@@ -1776,7 +1776,7 @@ public enum Pointcut {

	/**
	 * <b>Performance Tracing Hook:</b>
-	 * This hook is invoked when a search has failed for any reason. When this pointcut
+	 * This hook is invoked when a search has completed. When this pointcut
	 * is invoked, a pass in the Search Coordinator has completed successfully, but
	 * not all possible resources have been loaded yet so a future paging request
	 * may trigger a new task that will load further resources.
@@ -1814,6 +1814,44 @@ public enum Pointcut {
 		"ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"
 	),

+	/**
+	 * <b>Performance Tracing Hook:</b>
+	 * This hook is invoked when a query involving an external index (e.g. Elasticsearch) has completed. When this pointcut
+	 * is invoked, an initial list of resource IDs has been generated which will be used as part of a subsequent database query.
+	 * <p>
+	 * Note that this is a performance tracing hook. Use with caution in production
+	 * systems, since calling it may (or may not) carry a cost.
+	 * </p>
+	 * Hooks may accept the following parameters:
+	 * <ul>
+	 * <li>
+	 * ca.uhn.fhir.rest.api.server.RequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+	 * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+	 * pulled out of the servlet request. Note that the bean
+	 * properties are not all guaranteed to be populated, depending on how early during processing the
+	 * exception occurred.
+	 * </li>
+	 * <li>
+	 * ca.uhn.fhir.rest.server.servlet.ServletRequestDetails - A bean containing details about the request that is about to be processed, including details such as the
+	 * resource type and logical ID (if any) and other FHIR-specific aspects of the request which have been
+	 * pulled out of the servlet request. This parameter is identical to the RequestDetails parameter above but will
+	 * only be populated when operating in a RestfulServer implementation. It is provided as a convenience.
+	 * </li>
+	 * <li>
+	 * ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails - Contains details about the search being
+	 * performed. Hooks should not modify this object.
+	 * </li>
+	 * </ul>
+	 * <p>
+	 * Hooks should return <code>void</code>.
+	 * </p>
+	 */
+	JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE(void.class,
+		"ca.uhn.fhir.rest.api.server.RequestDetails",
+		"ca.uhn.fhir.rest.server.servlet.ServletRequestDetails",
+		"ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails"
+	),
+
 	/**
 	 * <b>Performance Tracing Hook:</b>
 	 * Invoked when the storage engine is about to reuse the results of
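For illustration only (not part of this commit): an interceptor consuming the new pointcut could look roughly like the sketch below. The @Hook registration style follows HAPI FHIR's interceptor framework; the SearchRuntimeDetails accessor names (getSearchUuid(), getFoundIndexMatchesCount()) are assumed from the setters used elsewhere in this change.

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Sketch of a performance-tracing interceptor for the new pointcut.
@Interceptor
public class IndexSearchTracingInterceptor {

	private static final Logger ourLog = LoggerFactory.getLogger(IndexSearchTracingInterceptor.class);

	@Hook(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)
	public void indexSearchQueryComplete(RequestDetails theRequestDetails, SearchRuntimeDetails theSearchRuntimeDetails) {
		// Reports how many candidate resource IDs the external index (e.g. Elasticsearch)
		// produced before the follow-up database query runs.
		ourLog.info("Index query for search {} matched {} resource IDs",
			theSearchRuntimeDetails.getSearchUuid(),
			theSearchRuntimeDetails.getFoundIndexMatchesCount());
	}
}

Registration would follow the usual interceptor route (for example, adding the instance to the server's interceptor registry).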
@@ -108,7 +108,7 @@ public class RunServerCommand extends BaseCommand {
 			ContextHolder.setDisableReferentialIntegrity(true);
 		}

 		ContextHolder.setDatabaseUrl(theCommandLine.getOptionValue("u"));

 		String reuseSearchResults = theCommandLine.getOptionValue(OPTION_REUSE_SEARCH_RESULTS_MILLIS);
 		if (reuseSearchResults != null) {
@@ -179,10 +179,10 @@
 					</overlays>
 					<webXml>src/main/webapp/WEB-INF/web.xml</webXml>
 					<attachClasses>true</attachClasses>
-					<excludes>
+					<packagingExcludes>
 						WEB-INF/lib/Saxon-HE-*,
 						WEB-INF/lib/hapi-*
-					</excludes>
+					</packagingExcludes>
 				</configuration>
 			</plugin>

@@ -20,18 +20,28 @@ package ca.uhn.fhir.jpa.demo;
  * #L%
  */

+import ca.uhn.fhir.context.ConfigurationException;
 import ca.uhn.fhir.jpa.api.config.DaoConfig;
 import ca.uhn.fhir.jpa.model.config.PartitionSettings;
 import ca.uhn.fhir.jpa.model.entity.ModelConfig;
 import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
+import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
 import org.apache.commons.dbcp2.BasicDataSource;
 import org.apache.commons.lang3.time.DateUtils;
 import org.hibernate.dialect.H2Dialect;
+import org.hibernate.search.elasticsearch.cfg.ElasticsearchIndexStatus;
+import org.hibernate.search.elasticsearch.cfg.IndexSchemaManagementStrategy;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic;
+import pl.allegro.tech.embeddedelasticsearch.PopularProperties;

+import javax.annotation.PreDestroy;
 import javax.sql.DataSource;
+import java.io.IOException;
 import java.util.Properties;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;

 import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -100,7 +110,55 @@ public class CommonConfig {
 			extraProperties.put("hibernate.search.autoregister_listeners", "false");
 		}

-		return extraProperties;
+		return configureElasticearch(extraProperties);
+	}
+
+	private Properties configureElasticearch(Properties theExtraProperties) {
+
+		String elasticsearchHost = "localhost";
+		String elasticsearchUserId = "";
+		String elasticsearchPassword = "";
+		int elasticsearchPort = embeddedElasticSearch().getHttpPort();
+
+		new ElasticsearchHibernatePropertiesBuilder()
+			.setDebugRefreshAfterWrite(true)
+			.setDebugPrettyPrintJsonLog(true)
+			.setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy.CREATE)
+			.setIndexManagementWaitTimeoutMillis(10000)
+			.setRequiredIndexStatus(ElasticsearchIndexStatus.YELLOW)
+			.setRestUrl("http://" + elasticsearchHost + ":" + elasticsearchPort)
+			.setUsername(elasticsearchUserId)
+			.setPassword(elasticsearchPassword)
+			.apply(theExtraProperties);
+
+		return theExtraProperties;
+	}
+
+	@Bean
+	public EmbeddedElastic embeddedElasticSearch() {
+		String ELASTIC_VERSION = "6.5.4";
+
+		EmbeddedElastic embeddedElastic = null;
+		try {
+			embeddedElastic = EmbeddedElastic.builder()
+				.withElasticVersion(ELASTIC_VERSION)
+				.withSetting(PopularProperties.TRANSPORT_TCP_PORT, 0)
+				.withSetting(PopularProperties.HTTP_PORT, 0)
+				.withSetting(PopularProperties.CLUSTER_NAME, UUID.randomUUID())
+				.withStartTimeout(60, TimeUnit.SECONDS)
+				.build()
+				.start();
+		} catch (IOException | InterruptedException e) {
+			throw new ConfigurationException(e);
+		}
+
+		return embeddedElastic;
+	}
+
+	@PreDestroy
+	public void stop() {
+		embeddedElasticSearch().stop();
 	}

 	@Bean
@@ -0,0 +1,4 @@
+---
+type: add
+issue: 1666
+title: "The email sender used by email subscriptions can now be configured with TLS parameters."

@@ -0,0 +1,4 @@
+---
+type: add
+issue: 1867
+title: "Initial implementation of lastn operation that uses an Elasticsearch v6 server to index observations."

@@ -0,0 +1,7 @@
+---
+type: add
+issue: 1871
+title: "In the JPA server, when indexing Date SearchParameters where the value being indexed is a
+  FHIR Period that is missing either a lower bound or an upper bound, a default value representing an
+  extreme 'beginning of time' or 'end of time' is now used. This allows range searches to return more
+  accurate results."
@@ -522,3 +522,72 @@ The following columns are common to **all HFJ_SPIDX_xxx tables**.
 	</tbody>
 </table>

+# HFJ_SPIDX_DATE: Date Search Parameters
+
+For any FHIR Search Parameter of type *date* that generates a database index, a row in the *HFJ_SPIDX_DATE* table will be created.
+
+## Columns
+
+The following columns are common to **all HFJ_SPIDX_xxx tables**.
+
+<table class="table table-striped table-condensed">
+	<thead>
+		<tr>
+			<th>Name</th>
+			<th>Relationships</th>
+			<th>Datatype</th>
+			<th>Nullable</th>
+			<th>Description</th>
+		</tr>
+	</thead>
+	<tbody>
+		<tr>
+			<td>SP_VALUE_LOW</td>
+			<td></td>
+			<td>Timestamp</td>
+			<td>Nullable</td>
+			<td>
+				This is the lower bound of the date in question.
+				<ul>
+					<li>For a point in time date to millisecond precision (such as an Instant with a value of <code>2020-05-26T15:00:00.000</code>) this represents the exact value.</li>
+					<li>For an instant value with lower precision, this represents the start of the possible range denoted by the value. For example, for a value of <code>2020-05-26</code> this represents <code>2020-05-26T00:00:00.000</code>.</li>
+					<li>For a Period with a lower (start) value present, this column contains that value.</li>
+					<li>For a Period with no lower (start) value present, this column contains a timestamp representing the "start of time".</li>
+				</ul>
+			</td>
+		</tr>
+		<tr>
+			<td>SP_VALUE_HIGH</td>
+			<td></td>
+			<td>Timestamp</td>
+			<td>Nullable</td>
+			<td>
+				This is the upper bound of the date in question.
+				<ul>
+					<li>For a point in time date to millisecond precision (such as an Instant with a value of <code>2020-05-26T15:00:00.000</code>) this represents the exact value.</li>
+					<li>For an instant value with lower precision, this represents the end of the possible range denoted by the value. For example, for a value of <code>2020-05-26</code> this represents <code>2020-05-26T23:59:59.999</code>.</li>
+					<li>For a Period with an upper (end) value present, this column contains that value.</li>
+					<li>For a Period with no upper (end) value present, this column contains a timestamp representing the "end of time".</li>
+				</ul>
+			</td>
+		</tr>
+		<tr>
+			<td>SP_VALUE_LOW_DATE_ORDINAL</td>
+			<td></td>
+			<td>Integer</td>
+			<td>Nullable</td>
+			<td>
+				This column contains the same Timestamp as <code>SP_VALUE_LOW</code>, but truncated to Date precision and formatted as an integer in the format "YYYYMMDD".
+			</td>
+		</tr>
+		<tr>
+			<td>SP_VALUE_HIGH_DATE_ORDINAL</td>
+			<td></td>
+			<td>Integer</td>
+			<td>Nullable</td>
+			<td>
+				This column contains the same Timestamp as <code>SP_VALUE_HIGH</code>, but truncated to Date precision and formatted as an integer in the format "YYYYMMDD".
+			</td>
+		</tr>
+	</tbody>
+</table>
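As an aside, the bounds and ordinal columns described in that table can be illustrated with plain java.time arithmetic. This is only a sketch of the mapping the documentation describes, not the HAPI FHIR indexing code itself.

import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

// Illustrates how a date-precision value such as 2020-05-26 maps to the
// SP_VALUE_LOW/HIGH timestamps and the YYYYMMDD ordinal columns.
public class SpidxDateExample {
	public static void main(String[] args) {
		LocalDate value = LocalDate.parse("2020-05-26");

		// SP_VALUE_LOW / SP_VALUE_HIGH: bounds of the range the value denotes
		LocalDateTime low = value.atStartOfDay();                    // 2020-05-26T00:00:00.000
		LocalDateTime high = value.atTime(23, 59, 59, 999_000_000);  // 2020-05-26T23:59:59.999

		// SP_VALUE_LOW_DATE_ORDINAL / SP_VALUE_HIGH_DATE_ORDINAL: date truncated to "YYYYMMDD"
		int lowOrdinal = Integer.parseInt(value.format(DateTimeFormatter.BASIC_ISO_DATE));  // 20200526
		int highOrdinal = lowOrdinal;

		System.out.println(low + " " + high + " " + lowOrdinal + " " + highOrdinal);
	}
}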
@@ -0,0 +1,144 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+	<modelVersion>4.0.0</modelVersion>
+
+	<parent>
+		<groupId>ca.uhn.hapi.fhir</groupId>
+		<artifactId>hapi-fhir</artifactId>
+		<version>5.1.0-SNAPSHOT</version>
+		<relativePath>../pom.xml</relativePath>
+	</parent>
+
+	<artifactId>hapi-fhir-elasticsearch-6</artifactId>
+
+	<name>hapi-fhir-elasticsearch-6</name>
+
+	<properties>
+		<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+		<maven.compiler.source>1.7</maven.compiler.source>
+		<maven.compiler.target>1.7</maven.compiler.target>
+	</properties>
+
+	<dependencies>
+		<dependency>
+			<groupId>junit</groupId>
+			<artifactId>junit</artifactId>
+			<version>4.12</version>
+			<scope>test</scope>
+		</dependency>
+		<!-- Elasticsearch -->
+		<dependency>
+			<groupId>org.elasticsearch.client</groupId>
+			<artifactId>elasticsearch-rest-high-level-client</artifactId>
+			<version>6.5.4</version>
+			<exclusions>
+				<!-- The following all need to be excluded to avoid conflicts with Hibernate-Search -->
+				<exclusion>
+					<groupId>com.fasterxml.jackson.core</groupId>
+					<artifactId>jackson-core</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.fasterxml.jackson.dataformat</groupId>
+					<artifactId>*</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.github.spullara.mustache.java</groupId>
+					<artifactId>compiler</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>com.tdunning</groupId>
+					<artifactId>t-digest</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>commons-codec</groupId>
+					<artifactId>commons-codec</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>commons-logging</groupId>
+					<artifactId>commons-logging</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>net.bytebuddy</groupId>
+					<artifactId>byte-buddy</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>net.sf.jopt-simple</groupId>
+					<artifactId>jopt-simple</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.apache.httpcomponents</groupId>
+					<artifactId>*</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.apache.lucene</groupId>
+					<artifactId>lucene-analyzers-common</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.apache.lucene</groupId>
+					<artifactId>lucene-backward-codecs</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.apache.lucene</groupId>
+					<artifactId>lucene-sandbox</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.elasticsearch</groupId>
+					<artifactId>jna</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.hdrhistogram</groupId>
+					<artifactId>HdrHistogram</artifactId>
+				</exclusion>
+				<exclusion>
+					<groupId>org.yaml</groupId>
+					<artifactId>snakeyaml</artifactId>
+				</exclusion>
+			</exclusions>
+		</dependency>
+	</dependencies>
+
+	<build>
+		<plugins>
+			<plugin>
+				<artifactId>maven-shade-plugin</artifactId>
+				<version>3.2.1</version>
+				<executions>
+					<execution>
+						<phase>package</phase>
+						<goals>
+							<goal>shade</goal>
+						</goals>
+						<configuration>
+							<shadedArtifactAttached>true</shadedArtifactAttached>
+							<shadedClassifierName>shaded6</shadedClassifierName> <!-- Any name that makes sense -->
+							<relocations>
+								<relocation>
+									<pattern>com.carrotsearch.hppc</pattern>
+									<shadedPattern>com.shadehapi.carrotsearch.hppc</shadedPattern>
+								</relocation>
+								<relocation>
+									<pattern>org.apache.logging.log4j</pattern>
+									<shadedPattern>org.shadehapi.apache.logging.log4j</shadedPattern>
+								</relocation>
+								<relocation>
+									<pattern>org.apache.lucene</pattern>
+									<shadedPattern>org.shadehapi.apache.lucene</shadedPattern>
+								</relocation>
+								<relocation>
+									<pattern>org.elasticsearch</pattern>
+									<shadedPattern>org.shadehapi.elasticsearch</shadedPattern>
+								</relocation>
+								<relocation>
+									<pattern>org.joda</pattern>
+									<shadedPattern>org.shadehapi.joda</shadedPattern>
+								</relocation>
+							</relocations>
+						</configuration>
+					</execution>
+				</executions>
+			</plugin>
+		</plugins>
+	</build>
+</project>
@@ -204,6 +204,9 @@ public interface IFhirResourceDao<T extends IBaseResource> extends IDao {

	IBundleProvider search(SearchParameterMap theParams, RequestDetails theRequestDetails, HttpServletResponse theServletResponse);

+	/**
+	 * Search for IDs for processing match URLs, etc.
+	 */
	Set<ResourcePersistentId> searchForIds(SearchParameterMap theParams, RequestDetails theRequest);

	/**
@@ -0,0 +1,43 @@
+package ca.uhn.fhir.jpa.api.dao;
+
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.rest.api.server.IBundleProvider;
+import ca.uhn.fhir.rest.api.server.RequestDetails;
+import org.hl7.fhir.instance.model.api.IBaseResource;
+
+import javax.servlet.http.HttpServletResponse;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2020 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+public interface IFhirResourceDaoObservation<T extends IBaseResource> extends IFhirResourceDao<T> {
+
+	/**
+	 * Returns a BundleProvider which can be used to implement the $lastn operation.
+	 * @param paramMap Parameters supported include Observation.subject, Observation.patient, Observation.code,
+	 *                 Observation.category, and max (the maximum number of Observations to return per specified subjects/patients,
+	 *                 codes, and/or categories).
+	 * @param theRequestDetails
+	 * @param theServletResponse
+	 * @return
+	 */
+	IBundleProvider observationsLastN(SearchParameterMap paramMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse);
+
+}
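A rough caller-side sketch (not taken from this commit) of how the new DAO method might be used. The setLastNMax() setter and the DAO wiring are assumptions based on the getters referenced elsewhere in this change; the parameter names mirror the Javadoc above.

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.r4.model.Observation;

import javax.servlet.http.HttpServletResponse;

public class LastNCallerSketch {

	// Hypothetical wiring: an IFhirResourceDaoObservation<Observation> obtained from the JPA DAO registry.
	private IFhirResourceDaoObservation<Observation> myObservationDao;

	public IBundleProvider fetchMostRecentVitals(RequestDetails theRequestDetails, HttpServletResponse theServletResponse) {
		SearchParameterMap paramMap = new SearchParameterMap();
		paramMap.setLastN(true);
		paramMap.setLastNMax(3); // the $lastn "max" parameter: up to 3 Observations per subject/code/category
		paramMap.add("patient", new ReferenceParam("Patient/123"));
		paramMap.add("category", new TokenParam("http://terminology.hl7.org/CodeSystem/observation-category", "vital-signs"));

		return myObservationDao.observationsLastN(paramMap, theRequestDetails, theServletResponse);
	}
}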
@@ -145,6 +145,13 @@
 			<artifactId>hapi-fhir-validation-resources-r5</artifactId>
 			<version>${project.version}</version>
 		</dependency>
+		<dependency>
+			<groupId>ca.uhn.hapi.fhir</groupId>
+			<artifactId>hapi-fhir-elasticsearch-6</artifactId>
+			<version>${project.version}</version>
+			<classifier>shaded6</classifier>
+		</dependency>
+
 		<dependency>
 			<groupId>net.ttddyy</groupId>
@@ -607,6 +614,28 @@
 			</plugins>
 		</pluginManagement>
 		<plugins>
+			<plugin>
+				<groupId>org.apache.maven.plugins</groupId>
+				<artifactId>maven-failsafe-plugin</artifactId>
+				<version>2.22.2</version>
+				<configuration>
+					<redirectTestOutputToFile>true</redirectTestOutputToFile>
+					<forkCount>1</forkCount>
+					<reuseForks>false</reuseForks>
+					<runOrder>alphabetical</runOrder>
+					<includes>
+						<include>**/*IT.java</include>
+					</includes>
+				</configuration>
+				<executions>
+					<execution>
+						<goals>
+							<goal>integration-test</goal>
+							<goal>verify</goal>
+						</goals>
+					</execution>
+				</executions>
+			</plugin>
 			<plugin>
 				<groupId>de.jpdigital</groupId>
 				<artifactId>hibernate54-ddl-maven-plugin</artifactId>
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.config;
 import ca.uhn.fhir.context.support.DefaultProfileValidationSupport;
 import ca.uhn.fhir.context.support.IValidationSupport;
 import ca.uhn.fhir.jpa.dao.JpaPersistedResourceValidationSupport;
+import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
 import ca.uhn.fhir.jpa.term.TermCodeSystemStorageSvcImpl;
 import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
 import ca.uhn.fhir.jpa.term.TermReindexingSvcImpl;
@@ -96,4 +97,11 @@ public abstract class BaseConfigDstu3Plus extends BaseConfig {

 	@Bean
 	public abstract ITermReadSvc terminologyService();
+
+	@Bean
+	public ObservationLastNIndexPersistSvc baseObservationLastNIndexpersistSvc() {
+		return new ObservationLastNIndexPersistSvc();
+	}
+
+
 }
@@ -1221,9 +1221,9 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora

 	@Override
 	public ResourceTable updateInternal(RequestDetails theRequestDetails, T theResource, boolean thePerformIndexing, boolean theForceUpdateVersion,
-													IBasePersistedResource theEntity2, IIdType theResourceId, IBaseResource theOldResource, TransactionDetails theTransactionDetails) {
+													IBasePersistedResource theEntity, IIdType theResourceId, IBaseResource theOldResource, TransactionDetails theTransactionDetails) {

-		ResourceTable entity = (ResourceTable) theEntity2;
+		ResourceTable entity = (ResourceTable) theEntity;

 		// We'll update the resource ID with the correct version later but for
 		// now at least set it to something useful for the interceptors
@@ -0,0 +1,128 @@
+package ca.uhn.fhir.jpa.dao;
+
+/*
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2020 University Health Network
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import ca.uhn.fhir.interceptor.model.RequestPartitionId;
+import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
+import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
+import ca.uhn.fhir.jpa.model.entity.ResourceTable;
+import ca.uhn.fhir.jpa.partition.IRequestPartitionHelperSvc;
+import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
+import ca.uhn.fhir.model.api.IQueryParameterType;
+import ca.uhn.fhir.rest.api.*;
+import ca.uhn.fhir.rest.api.server.*;
+import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
+import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
+import ca.uhn.fhir.rest.param.ReferenceParam;
+import org.hl7.fhir.instance.model.api.*;
+import org.springframework.beans.factory.annotation.Autowired;
+
+import java.util.ArrayList;
+import java.util.Date;
+import java.util.List;
+import java.util.TreeMap;
+
+public abstract class BaseHapiFhirResourceDaoObservation<T extends IBaseResource> extends BaseHapiFhirResourceDao<T> implements IFhirResourceDaoObservation<T> {
+
+	@Autowired
+	ObservationLastNIndexPersistSvc myObservationLastNIndexPersistSvc;
+
+	@Autowired
+	private IRequestPartitionHelperSvc myRequestPartitionHelperService;
+
+	protected ResourceTable updateObservationEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource theEntity,
+																	Date theDeletedTimestampOrNull, boolean thePerformIndexing, boolean theUpdateVersion,
+																	TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
+		ResourceTable retVal = super.updateEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull, thePerformIndexing, theUpdateVersion,
+			theTransactionDetails, theForceUpdate, theCreateNewHistoryEntry);
+
+		if (!retVal.isUnchangedInCurrentOperation()) {
+			if (retVal.getDeleted() == null) {
+				// Update indexes here for LastN operation.
+				myObservationLastNIndexPersistSvc.indexObservation(theResource);
+			} else {
+				myObservationLastNIndexPersistSvc.deleteObservationIndex(theEntity);
+			}
+		}
+
+		return retVal;
+	}
+
+	protected void updateSearchParamsForLastn(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails) {
+		if (!isPagingProviderDatabaseBacked(theRequestDetails)) {
+			theSearchParameterMap.setLoadSynchronous(true);
+		}
+
+		theSearchParameterMap.setLastN(true);
+		SortSpec effectiveDtm = new SortSpec(getEffectiveParamName()).setOrder(SortOrderEnum.DESC);
+		SortSpec observationCode = new SortSpec(getCodeParamName()).setOrder(SortOrderEnum.ASC).setChain(effectiveDtm);
+		if (theSearchParameterMap.containsKey(getSubjectParamName()) || theSearchParameterMap.containsKey(getPatientParamName())) {
+			fixSubjectParamsOrderForLastn(theSearchParameterMap, theRequestDetails);
+			theSearchParameterMap.setSort(new SortSpec(getSubjectParamName()).setOrder(SortOrderEnum.ASC).setChain(observationCode));
+		} else {
+			theSearchParameterMap.setSort(observationCode);
+		}
+	}
+
+	private void fixSubjectParamsOrderForLastn(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails) {
+		// Need to ensure that the patient/subject parameters are sorted in the SearchParameterMap to ensure correct ordering of
+		// the output. The reason for this is that observations are indexed by patient/subject forced ID, but then ordered in the
+		// final result set by subject/patient resource PID.
+		TreeMap<Long, IQueryParameterType> orderedSubjectReferenceMap = new TreeMap<>();
+		if (theSearchParameterMap.containsKey(getSubjectParamName())) {
+
+			RequestPartitionId requestPartitionId = myRequestPartitionHelperService.determineReadPartitionForRequest(theRequestDetails, getResourceName());
+
+			List<List<IQueryParameterType>> patientParams = new ArrayList<>();
+			if (theSearchParameterMap.get(getPatientParamName()) != null) {
+				patientParams.addAll(theSearchParameterMap.get(getPatientParamName()));
+			}
+			if (theSearchParameterMap.get(getSubjectParamName()) != null) {
+				patientParams.addAll(theSearchParameterMap.get(getSubjectParamName()));
+			}
+
+			for (List<? extends IQueryParameterType> nextPatientList : patientParams) {
+				for (IQueryParameterType nextOr : nextPatientList) {
+					if (nextOr instanceof ReferenceParam) {
+						ReferenceParam ref = (ReferenceParam) nextOr;
+						ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, ref.getResourceType(), ref.getIdPart());
+						orderedSubjectReferenceMap.put(pid.getIdAsLong(), nextOr);
+					} else {
+						throw new IllegalArgumentException("Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
+					}
+				}
+			}
+
+			theSearchParameterMap.remove(getSubjectParamName());
+			theSearchParameterMap.remove(getPatientParamName());
+			for (Long subjectPid : orderedSubjectReferenceMap.keySet()) {
+				theSearchParameterMap.add(getSubjectParamName(), orderedSubjectReferenceMap.get(subjectPid));
+			}
+		}
+
+	}
+
+	abstract protected String getEffectiveParamName();
+	abstract protected String getCodeParamName();
+	abstract protected String getSubjectParamName();
+	abstract protected String getPatientParamName();
+
+}
@@ -0,0 +1,228 @@
+package ca.uhn.fhir.jpa.dao;
+
+import ca.uhn.fhir.context.*;
+import ca.uhn.fhir.jpa.dao.data.IObservationIndexedCodeCodingSearchParamDao;
+import ca.uhn.fhir.jpa.dao.data.IObservationIndexedSearchParamLastNDao;
+import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
+import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCategoryCodeableConceptEntity;
+import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCategoryCodingEntity;
+import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity;
+import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodingEntity;
+import ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity;
+import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamToken;
+import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
+import ca.uhn.fhir.jpa.searchparam.extractor.PathAndRef;
+import org.hl7.fhir.instance.model.api.*;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.transaction.annotation.Propagation;
+import org.springframework.transaction.annotation.Transactional;
+
+import javax.persistence.EntityManager;
+import javax.persistence.PersistenceContext;
+import javax.persistence.PersistenceContextType;
+import java.util.*;
+
+@Transactional(propagation = Propagation.REQUIRED)
+public class ObservationLastNIndexPersistSvc {
+
+	@PersistenceContext(type = PersistenceContextType.TRANSACTION)
+	protected EntityManager myEntityManager;
+
+	@Autowired
+	IObservationIndexedSearchParamLastNDao myResourceIndexedObservationLastNDao;
+
+	@Autowired
+	IObservationIndexedCodeCodingSearchParamDao myObservationIndexedCodeCodingSearchParamDao;
+
+	@Autowired
+	public ISearchParamExtractor mySearchParameterExtractor;
+
+	public void indexObservation(IBaseResource theResource) {
+
+		List<IBase> subjectReferenceElement = mySearchParameterExtractor.extractValues("Observation.subject", theResource);
+		String subjectId = subjectReferenceElement.stream()
+			.map(refElement -> mySearchParameterExtractor.extractReferenceLinkFromResource(refElement, "Observation.subject"))
+			.filter(Objects::nonNull)
+			.map(PathAndRef::getRef)
+			.filter(Objects::nonNull)
+			.map(subjectRef -> subjectRef.getReferenceElement().getValue())
+			.filter(Objects::nonNull)
+			.findFirst().orElse(null);
+
+		Date effectiveDtm = null;
+		List<IBase> effectiveDateElement = mySearchParameterExtractor.extractValues("Observation.effective", theResource);
+		if (effectiveDateElement.size() > 0) {
+			effectiveDtm = mySearchParameterExtractor.extractDateFromResource(effectiveDateElement.get(0), "Observation.effective");
+		}
+
+		List<IBase> observationCodeCodeableConcepts = mySearchParameterExtractor.extractValues("Observation.code", theResource);
+
+		// Only index for lastn if Observation has a code
+		if (observationCodeCodeableConcepts.size() == 0) {
+			return;
+		}
+
+		List<IBase> observationCategoryCodeableConcepts = mySearchParameterExtractor.extractValues("Observation.category", theResource);
+
+		String resourcePID = theResource.getIdElement().getIdPart();
+
+		createOrUpdateIndexedObservation(resourcePID, effectiveDtm, subjectId, observationCodeCodeableConcepts, observationCategoryCodeableConcepts);
+
+	}
+
+	private void createOrUpdateIndexedObservation(String resourcePID, Date theEffectiveDtm, String theSubjectId,
+																 List<IBase> theObservationCodeCodeableConcepts,
+																 List<IBase> theObservationCategoryCodeableConcepts) {
+
+		// Determine if an index already exists for Observation:
+		boolean observationIndexUpdate = false;
+		ObservationIndexedSearchParamLastNEntity indexedObservation = null;
+		if (resourcePID != null) {
+			indexedObservation = myResourceIndexedObservationLastNDao.findByIdentifier(resourcePID);
+		}
+		if (indexedObservation == null) {
+			indexedObservation = new ObservationIndexedSearchParamLastNEntity();
+		} else {
+			observationIndexUpdate = true;
+		}
+
+		indexedObservation.setEffectiveDtm(theEffectiveDtm);
+		indexedObservation.setIdentifier(resourcePID);
+		indexedObservation.setSubject(theSubjectId);
+
+		addCodeToObservationIndex(theObservationCodeCodeableConcepts, indexedObservation);
+
+		addCategoriesToObservationIndex(theObservationCategoryCodeableConcepts, indexedObservation);
+
+		if (observationIndexUpdate) {
+			myEntityManager.merge(indexedObservation);
+		} else {
+			myEntityManager.persist(indexedObservation);
+		}
+
+	}
+
+	private void addCodeToObservationIndex(List<IBase> theObservationCodeCodeableConcepts,
+														ObservationIndexedSearchParamLastNEntity theIndexedObservation) {
+		// Determine if a Normalized ID was created previously for Observation Code
+		Optional<String> existingObservationCodeNormalizedId = getCodeCodeableConceptIdIfExists(theObservationCodeCodeableConcepts.get(0));
+
+		// Create/update normalized Observation Code index record
+		ObservationIndexedCodeCodeableConceptEntity codeableConceptField =
+			getCodeCodeableConcept(theObservationCodeCodeableConcepts.get(0),
+				existingObservationCodeNormalizedId.orElse(UUID.randomUUID().toString()));
+
+		if (existingObservationCodeNormalizedId.isPresent()) {
+			myEntityManager.merge(codeableConceptField);
+		} else {
+			myEntityManager.persist(codeableConceptField);
+		}
+
+		theIndexedObservation.setObservationCode(codeableConceptField);
+		theIndexedObservation.setCodeNormalizedId(codeableConceptField.getCodeableConceptId());
+
+	}
+
+	private void addCategoriesToObservationIndex(List<IBase> observationCategoryCodeableConcepts,
+																ObservationIndexedSearchParamLastNEntity indexedObservation) {
+		// Build CodeableConcept entities for Observation.Category
+		Set<ObservationIndexedCategoryCodeableConceptEntity> categoryCodeableConceptEntities = new HashSet<>();
+		for (IBase categoryCodeableConcept : observationCategoryCodeableConcepts) {
+			// Build CodeableConcept entities for each category CodeableConcept
+			categoryCodeableConceptEntities.add(getCategoryCodeableConceptEntities(categoryCodeableConcept));
+		}
+		indexedObservation.setCategoryCodeableConcepts(categoryCodeableConceptEntities);
+
+	}
+
+	private ObservationIndexedCategoryCodeableConceptEntity getCategoryCodeableConceptEntities(IBase theValue) {
+		String text = mySearchParameterExtractor.getDisplayTextFromCodeableConcept(theValue);
+		ObservationIndexedCategoryCodeableConceptEntity categoryCodeableConcept = new ObservationIndexedCategoryCodeableConceptEntity(text);
+
+		List<IBase> codings = mySearchParameterExtractor.getCodingsFromCodeableConcept(theValue);
+		Set<ObservationIndexedCategoryCodingEntity> categoryCodingEntities = new HashSet<>();
+		for (IBase nextCoding : codings) {
+			categoryCodingEntities.add(getCategoryCoding(nextCoding));
+		}
+
+		categoryCodeableConcept.setObservationIndexedCategoryCodingEntitySet(categoryCodingEntities);
+
+		return categoryCodeableConcept;
+	}
+
+	private ObservationIndexedCodeCodeableConceptEntity getCodeCodeableConcept(IBase theValue, String observationCodeNormalizedId) {
+		String text = mySearchParameterExtractor.getDisplayTextFromCodeableConcept(theValue);
+		ObservationIndexedCodeCodeableConceptEntity codeCodeableConcept = new ObservationIndexedCodeCodeableConceptEntity(text, observationCodeNormalizedId);
+
+		List<IBase> codings = mySearchParameterExtractor.getCodingsFromCodeableConcept(theValue);
+		for (IBase nextCoding : codings) {
+			codeCodeableConcept.addCoding(getCodeCoding(nextCoding, observationCodeNormalizedId));
+		}
+
+		return codeCodeableConcept;
+	}
+
+	private Optional<String> getCodeCodeableConceptIdIfExists(IBase theValue) {
+		List<IBase> codings = mySearchParameterExtractor.getCodingsFromCodeableConcept(theValue);
+		String codeCodeableConceptId = null;
+		Optional<String> codeCodeableConceptIdOptional = Optional.empty();
+
+		for (IBase nextCoding : codings) {
+			ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding("Observation",
+				new RuntimeSearchParam(null, null, "code", null, null, null,
+					null, null, null, null),
+				nextCoding);
+			if (param != null) {
+				String system = param.getSystem();
+				String code = param.getValue();
+				String text = mySearchParameterExtractor.getDisplayTextForCoding(nextCoding);
+				if (code != null && system != null) {
+					codeCodeableConceptIdOptional = Optional.ofNullable(myObservationIndexedCodeCodingSearchParamDao.findByCodeAndSystem(code, system));
+				} else {
+					codeCodeableConceptIdOptional = Optional.ofNullable(myObservationIndexedCodeCodingSearchParamDao.findByDisplay(text));
+				}
+				if (codeCodeableConceptIdOptional.isPresent()) {
+					break;
+				}
+			}
+		}
+
+		return codeCodeableConceptIdOptional;
+	}
+
+	private ObservationIndexedCategoryCodingEntity getCategoryCoding(IBase theValue) {
+		ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding("Observation",
+			new RuntimeSearchParam(null, null, "category", null, null, null, null, null, null, null),
+			theValue);
+		ObservationIndexedCategoryCodingEntity observationIndexedCategoryCodingEntity = null;
+		if (param != null) {
+			String system = param.getSystem();
+			String code = param.getValue();
+			String text = mySearchParameterExtractor.getDisplayTextForCoding(theValue);
+			observationIndexedCategoryCodingEntity = new ObservationIndexedCategoryCodingEntity(system, code, text);
+		}
+		return observationIndexedCategoryCodingEntity;
+	}
+
+	private ObservationIndexedCodeCodingEntity getCodeCoding(IBase theValue, String observationCodeNormalizedId) {
+		ResourceIndexedSearchParamToken param = mySearchParameterExtractor.createSearchParamForCoding("Observation",
+			new RuntimeSearchParam(null, null, "code", null, null, null, null, null, null, null),
+			theValue);
+		ObservationIndexedCodeCodingEntity observationIndexedCodeCodingEntity = null;
+		if (param != null) {
+			String system = param.getSystem();
+			String code = param.getValue();
+			String text = mySearchParameterExtractor.getDisplayTextForCoding(theValue);
+			observationIndexedCodeCodingEntity = new ObservationIndexedCodeCodingEntity(system, code, text, observationCodeNormalizedId);
+		}
+		return observationIndexedCodeCodingEntity;
+	}
+
+	public void deleteObservationIndex(IBasePersistedResource theEntity) {
+		ObservationIndexedSearchParamLastNEntity deletedObservationLastNEntity = myResourceIndexedObservationLastNDao.findByIdentifier(theEntity.getIdDt().getIdPart());
+		if (deletedObservationLastNEntity != null) {
+			myEntityManager.remove(deletedObservationLastNEntity);
+		}
+	}
+
+}
@@ -48,10 +48,12 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
 import ca.uhn.fhir.jpa.model.entity.ResourceTag;
 import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;
 import ca.uhn.fhir.jpa.model.search.StorageProcessingMessage;
+import ca.uhn.fhir.jpa.search.lastn.IElasticsearchSvc;
 import ca.uhn.fhir.jpa.searchparam.JpaRuntimeSearchParam;
 import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
 import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
 import ca.uhn.fhir.jpa.searchparam.util.Dstu3DistanceHelper;
+import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
 import ca.uhn.fhir.jpa.util.BaseIterator;
 import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
 import ca.uhn.fhir.jpa.util.JpaInterceptorBroadcaster;
@@ -79,7 +81,6 @@ import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
 import ca.uhn.fhir.util.StopWatch;
 import ca.uhn.fhir.util.UrlUtil;
 import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Lists;
 import org.apache.commons.lang3.Validate;
 import org.hibernate.ScrollMode;
 import org.hibernate.ScrollableResults;
@@ -129,6 +130,8 @@ public class SearchBuilder implements ISearchBuilder {
 	 */
 	// NB: keep public
 	public static final int MAXIMUM_PAGE_SIZE = 800;
+	public static final int MAXIMUM_PAGE_SIZE_FOR_TESTING = 50;
+	public static boolean myUseMaxPageSize50ForTest = false;

 	private static final List<ResourcePersistentId> EMPTY_LONG_LIST = Collections.unmodifiableList(new ArrayList<>());
 	private static final Logger ourLog = LoggerFactory.getLogger(SearchBuilder.class);
@@ -153,6 +156,8 @@ public class SearchBuilder implements ISearchBuilder {
 	private IdHelperService myIdHelperService;
 	@Autowired(required = false)
 	private IFulltextSearchSvc myFulltextSearchSvc;
+	@Autowired(required = false)
+	private IElasticsearchSvc myIElasticsearchSvc;
 	@Autowired
 	private ISearchParamRegistry mySearchParamRegistry;
 	@Autowired
@@ -178,6 +183,18 @@ public class SearchBuilder implements ISearchBuilder {
 		myResourceType = theResourceType;
 	}

+	public static int getMaximumPageSize() {
+		if (myUseMaxPageSize50ForTest) {
+			return MAXIMUM_PAGE_SIZE_FOR_TESTING;
+		} else {
+			return MAXIMUM_PAGE_SIZE;
+		}
+	}
+
+	public static void setMaxPageSize50ForTest(boolean theIsTest) {
+		myUseMaxPageSize50ForTest = theIsTest;
+	}
+
 	@Override
 	public void setMaxResultsToFetch(Integer theMaxResultsToFetch) {
 		myMaxResultsToFetch = theMaxResultsToFetch;
@@ -206,6 +223,10 @@ public class SearchBuilder implements ISearchBuilder {
 		// Handle each parameter
 		for (Map.Entry<String, List<List<IQueryParameterType>>> nextParamEntry : myParams.entrySet()) {
 			String nextParamName = nextParamEntry.getKey();
+			if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
+				// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by Elasticsearch
+				continue;
+			}
 			List<List<IQueryParameterType>> andOrParams = nextParamEntry.getValue();
 			searchForIdsWithAndOr(myResourceName, nextParamName, andOrParams, theRequest);
 		}
@@ -227,8 +248,8 @@ public class SearchBuilder implements ISearchBuilder {

 		init(theParams, theSearchUuid, theRequestPartitionId);

-		TypedQuery<Long> query = createQuery(null, null, true, theRequest);
-		return new CountQueryIterator(query);
+		List<TypedQuery<Long>> queries = createQuery(null, null, true, theRequest, null);
+		return new CountQueryIterator(queries.get(0));
 	}

 	/**
@@ -261,8 +282,81 @@ public class SearchBuilder implements ISearchBuilder {
 		myRequestPartitionId = theRequestPartitionId;
 	}

-	private TypedQuery<Long> createQuery(SortSpec sort, Integer theMaximumResults, boolean theCount, RequestDetails theRequest) {
+	private List<TypedQuery<Long>> createQuery(SortSpec sort, Integer theMaximumResults, boolean theCount, RequestDetails theRequest,
+															 SearchRuntimeDetails theSearchRuntimeDetails) {
+
+		List<ResourcePersistentId> pids = new ArrayList<>();
+
+		/*
+		 * Fulltext or lastn search
+		 */
+		if (myParams.containsKey(Constants.PARAM_CONTENT) || myParams.containsKey(Constants.PARAM_TEXT) || myParams.isLastN()) {
+			if (myParams.containsKey(Constants.PARAM_CONTENT) || myParams.containsKey(Constants.PARAM_TEXT)) {
+				if (myFulltextSearchSvc == null) {
+					if (myParams.containsKey(Constants.PARAM_TEXT)) {
+						throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_TEXT);
+					} else if (myParams.containsKey(Constants.PARAM_CONTENT)) {
+						throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_CONTENT);
+					}
+				}
+
+				if (myParams.getEverythingMode() != null) {
+					pids = myFulltextSearchSvc.everything(myResourceName, myParams, theRequest);
+				} else {
+					pids = myFulltextSearchSvc.search(myResourceName, myParams);
+				}
+			} else if (myParams.isLastN()) {
+				if (myIElasticsearchSvc == null) {
+					if (myParams.isLastN()) {
+						throw new InvalidRequestException("LastN operation is not enabled on this service, can not process this request");
+					}
+				}
+				if (myParams.getLastNMax() == null) {
+					throw new InvalidRequestException("Max parameter is required for $lastn operation");
+				}
+				List<String> lastnResourceIds = myIElasticsearchSvc.executeLastN(myParams, myContext, theMaximumResults);
+				for (String lastnResourceId : lastnResourceIds) {
+					pids.add(myIdHelperService.resolveResourcePersistentIds(myRequestPartitionId, myResourceName, lastnResourceId));
+				}
+			}
+			if (theSearchRuntimeDetails != null) {
+				theSearchRuntimeDetails.setFoundIndexMatchesCount(pids.size());
+				HookParams params = new HookParams()
+					.add(RequestDetails.class, theRequest)
+					.addIfMatchesType(ServletRequestDetails.class, theRequest)
+					.add(SearchRuntimeDetails.class, theSearchRuntimeDetails);
+				JpaInterceptorBroadcaster.doCallHooks(myInterceptorBroadcaster, theRequest, Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE, params);
+			}
+
+			if (pids.isEmpty()) {
+				// Will never match
+				pids = Collections.singletonList(new ResourcePersistentId(-1L));
+			}
+
+		}
+
+		ArrayList<TypedQuery<Long>> myQueries = new ArrayList<>();
+
+		if (!pids.isEmpty()) {
+			if (theMaximumResults != null && pids.size() > theMaximumResults) {
+				pids.subList(0, theMaximumResults - 1);
+			}
+			new QueryChunker<Long>().chunk(ResourcePersistentId.toLongList(pids), t -> doCreateChunkedQueries(t, sort, theCount, theRequest, myQueries));
+		} else {
+			myQueries.add(createChunkedQuery(sort, theMaximumResults, theCount, theRequest, null));
+		}
+
+		return myQueries;
+	}
+
+	private void doCreateChunkedQueries(List<Long> thePids, SortSpec sort, boolean theCount, RequestDetails theRequest, ArrayList<TypedQuery<Long>> theQueries) {
+		if (thePids.size() < getMaximumPageSize()) {
+			normalizeIdListForLastNInClause(thePids);
+		}
+		theQueries.add(createChunkedQuery(sort, thePids.size(), theCount, theRequest, thePids));
+	}
+
+	private TypedQuery<Long> createChunkedQuery(SortSpec sort, Integer theMaximumResults, boolean theCount, RequestDetails theRequest, List<Long> thePidList) {
 		/*
 		 * Sort
 		 *
@@ -310,30 +404,9 @@ public class SearchBuilder implements ISearchBuilder {
 			searchForIdsWithAndOr(myParams, theRequest);
 		}

-		/*
-		 * Fulltext search
-		 */
-		if (myParams.containsKey(Constants.PARAM_CONTENT) || myParams.containsKey(Constants.PARAM_TEXT)) {
-			if (myFulltextSearchSvc == null) {
-				if (myParams.containsKey(Constants.PARAM_TEXT)) {
-					throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_TEXT);
-				} else if (myParams.containsKey(Constants.PARAM_CONTENT)) {
-					throw new InvalidRequestException("Fulltext search is not enabled on this service, can not process parameter: " + Constants.PARAM_CONTENT);
-				}
-			}
-
-			List<ResourcePersistentId> pids;
-			if (myParams.getEverythingMode() != null) {
-				pids = myFulltextSearchSvc.everything(myResourceName, myParams, theRequest);
-			} else {
-				pids = myFulltextSearchSvc.search(myResourceName, myParams);
-			}
-			if (pids.isEmpty()) {
-				// Will never match
-				pids = Collections.singletonList(new ResourcePersistentId(-1L));
-			}
-
-			myQueryStack.addPredicate(myQueryStack.get("myId").as(Long.class).in(ResourcePersistentId.toLongList(pids)));
+		// Add PID list predicate for full text search and/or lastn operation
+		if (thePidList != null && thePidList.size() > 0) {
+			myQueryStack.addPredicate(myQueryStack.get("myId").as(Long.class).in(thePidList));
 		}

 		// Last updated
@@ -355,6 +428,40 @@ public class SearchBuilder implements ISearchBuilder {
       return query;
    }
 
+   private List<Long> normalizeIdListForLastNInClause(List<Long> lastnResourceIds) {
+      /*
+         The following is a workaround to a known issue involving Hibernate. If queries are used with "in" clauses with large and varying
+         numbers of parameters, this can overwhelm Hibernate's QueryPlanCache and deplete heap space. See the following link for more info:
+         https://stackoverflow.com/questions/31557076/spring-hibernate-query-plan-cache-memory-usage.
+
+         Normalizing the number of parameters in the "in" clause stabilizes the size of the QueryPlanCache, so long as the number of
+         arguments never exceeds the maximum specified below.
+      */
+      int listSize = lastnResourceIds.size();
+
+      if (listSize > 1 && listSize < 10) {
+         padIdListWithPlaceholders(lastnResourceIds, 10);
+      } else if (listSize > 10 && listSize < 50) {
+         padIdListWithPlaceholders(lastnResourceIds, 50);
+      } else if (listSize > 50 && listSize < 100) {
+         padIdListWithPlaceholders(lastnResourceIds, 100);
+      } else if (listSize > 100 && listSize < 200) {
+         padIdListWithPlaceholders(lastnResourceIds, 200);
+      } else if (listSize > 200 && listSize < 500) {
+         padIdListWithPlaceholders(lastnResourceIds, 500);
+      } else if (listSize > 500 && listSize < 800) {
+         padIdListWithPlaceholders(lastnResourceIds, 800);
+      }
+
+      return lastnResourceIds;
+   }
+
+   private void padIdListWithPlaceholders(List<Long> theIdList, int preferredListSize) {
+      while (theIdList.size() < preferredListSize) {
+         theIdList.add(-1L);
+      }
+   }
+
    /**
    * @return Returns {@literal true} if any search parameter sorts were found, or false if
    * no sorts were found, or only non-search parameters ones (e.g. _id, _lastUpdated)
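The padding above targets Hibernate's query plan cache: each distinct number of IN-clause bind parameters yields a distinct JPQL string and therefore a distinct cached plan. The sketch below illustrates the bucketing effect in isolation; it is a simplified variant of the rule (the boundary conditions above differ slightly), and the -1L placeholder is assumed to be the same never-matching PID used elsewhere in this class:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class InClausePaddingSketch {

   // Pad the list up to the next "bucket" size so only a handful of distinct
   // parameter counts (and therefore cached query plans) ever reach Hibernate.
   static List<Long> normalize(List<Long> theIds) {
      int[] buckets = {10, 50, 100, 200, 500, 800};
      List<Long> padded = new ArrayList<>(theIds);
      for (int bucket : buckets) {
         if (padded.size() > 1 && padded.size() < bucket) {
            while (padded.size() < bucket) {
               padded.add(-1L); // -1 can never match a real resource PID
            }
            break;
         }
      }
      return padded;
   }

   public static void main(String[] args) {
      System.out.println(normalize(Arrays.asList(1L, 2L, 3L)).size());     // 10
      System.out.println(normalize(Arrays.asList(1L, 2L, 3L, 4L)).size()); // 10
      // 3 IDs and 4 IDs now produce the same parameter count, so they share one cached plan
   }
}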
@@ -478,10 +585,17 @@ public class SearchBuilder implements ISearchBuilder {
 
 
    private void doLoadPids(Collection<ResourcePersistentId> thePids, Collection<ResourcePersistentId> theIncludedPids, List<IBaseResource> theResourceListToPopulate, boolean theForHistoryOperation,
-                           Map<ResourcePersistentId, Integer> thePosition, RequestDetails theRequest) {
+                           Map<ResourcePersistentId, Integer> thePosition) {
 
+      List<Long> myLongPersistentIds;
+      if (thePids.size() < getMaximumPageSize()) {
+         myLongPersistentIds = normalizeIdListForLastNInClause(ResourcePersistentId.toLongList(thePids));
+      } else {
+         myLongPersistentIds = ResourcePersistentId.toLongList(thePids);
+      }
+
       // -- get the resource from the searchView
-      Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(ResourcePersistentId.toLongList(thePids));
+      Collection<ResourceSearchView> resourceSearchViewList = myResourceSearchViewDao.findByResourceIds(myLongPersistentIds);
 
       //-- preload all tags with tag definition if any
       Map<ResourcePersistentId, Collection<ResourceTag>> tagMap = getResourceTagMap(resourceSearchViewList);
@@ -581,9 +695,7 @@ public class SearchBuilder implements ISearchBuilder {
       }
 
      List<ResourcePersistentId> pids = new ArrayList<>(thePids);
-     new QueryChunker<ResourcePersistentId>().chunk(pids, t -> {
-        doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position, theDetails);
-     });
+     new QueryChunker<ResourcePersistentId>().chunk(pids, t -> doLoadPids(t, theIncludedPids, theResourceListToPopulate, theForHistoryOperation, position));
 
    }
 
@@ -627,7 +739,7 @@ public class SearchBuilder implements ISearchBuilder {
       if (matchAll) {
          String sql;
          sql = "SELECT r." + findFieldName + " FROM ResourceLink r WHERE r." + searchFieldName + " IN (:target_pids) ";
-         List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, MAXIMUM_PAGE_SIZE);
+         List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, getMaximumPageSize());
         for (Collection<ResourcePersistentId> nextPartition : partitions) {
             TypedQuery<Long> q = theEntityManager.createQuery(sql, Long.class);
             q.setParameter("target_pids", ResourcePersistentId.toLongList(nextPartition));
@@ -680,7 +792,7 @@ public class SearchBuilder implements ISearchBuilder {
            sql = "SELECT r." + findFieldName + " FROM ResourceLink r WHERE r.mySourcePath = :src_path AND r." + searchFieldName + " IN (:target_pids)";
         }
 
-        List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, MAXIMUM_PAGE_SIZE);
+        List<Collection<ResourcePersistentId>> partitions = partition(nextRoundMatches, getMaximumPageSize());
        for (Collection<ResourcePersistentId> nextPartition : partitions) {
            TypedQuery<Long> q = theEntityManager.createQuery(sql, Long.class);
            q.setParameter("src_path", nextPath);
@@ -925,9 +1037,8 @@ public class SearchBuilder implements ISearchBuilder {
 
      private final RequestDetails myRequest;
      private Iterator<ResourcePersistentId> myCurrentIterator;
-     private Set<ResourcePersistentId> myCurrentPids;
+     private final Set<ResourcePersistentId> myCurrentPids;
      private ResourcePersistentId myNext;
-     private int myPageSize = myDaoConfig.getEverythingIncludesFetchPageSize();
 
      IncludesIterator(Set<ResourcePersistentId> thePidSet, RequestDetails theRequest) {
         myCurrentPids = new HashSet<>(thePidSet);
@@ -980,11 +1091,13 @@ public class SearchBuilder implements ISearchBuilder {
      private ResourcePersistentId myNext;
      private Iterator<ResourcePersistentId> myPreResultsIterator;
      private ScrollableResultsIterator<Long> myResultsIterator;
-     private SortSpec mySort;
+     private final SortSpec mySort;
      private boolean myStillNeedToFetchIncludes;
      private int mySkipCount = 0;
      private int myNonSkipCount = 0;
 
+     private List<TypedQuery<Long>> myQueryList = new ArrayList<>();
+
      private QueryIterator(SearchRuntimeDetails theSearchRuntimeDetails, RequestDetails theRequest) {
         mySearchRuntimeDetails = theSearchRuntimeDetails;
         mySort = myParams.getSort();
@@ -1035,7 +1148,12 @@ public class SearchBuilder implements ISearchBuilder {
         }
 
         if (myNext == null) {
-           while (myResultsIterator.hasNext()) {
+           while (myResultsIterator.hasNext() || !myQueryList.isEmpty()) {
+
+              // Update iterator with next chunk if necessary.
+              if (!myResultsIterator.hasNext()) {
+                 retrieveNextIteratorQuery();
+              }
 
              Long nextLong = myResultsIterator.next();
              if (myHavePerfTraceFoundIdHook) {
                 HookParams params = new HookParams()
@@ -1134,19 +1252,33 @@ public class SearchBuilder implements ISearchBuilder {
      }
 
      private void initializeIteratorQuery(Integer theMaxResultsToFetch) {
-        final TypedQuery<Long> query = createQuery(mySort, theMaxResultsToFetch, false, myRequest);
+        if (myQueryList.isEmpty()) {
+           // Capture times for Lucene/Elasticsearch queries as well
+           mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
+           myQueryList = createQuery(mySort, theMaxResultsToFetch, false, myRequest, mySearchRuntimeDetails);
+        }
 
        mySearchRuntimeDetails.setQueryStopwatch(new StopWatch());
 
-       Query<Long> hibernateQuery = (Query<Long>) query;
-       hibernateQuery.setFetchSize(myFetchSize);
-       ScrollableResults scroll = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
-       myResultsIterator = new ScrollableResultsIterator<>(scroll);
+       retrieveNextIteratorQuery();
 
        mySkipCount = 0;
        myNonSkipCount = 0;
     }
 
+     private void retrieveNextIteratorQuery() {
+        if (myQueryList != null && myQueryList.size() > 0) {
+           final TypedQuery<Long> query = myQueryList.remove(0);
+           Query<Long> hibernateQuery = (Query<Long>) (query);
+           hibernateQuery.setFetchSize(myFetchSize);
+           ScrollableResults scroll = hibernateQuery.scroll(ScrollMode.FORWARD_ONLY);
+           myResultsIterator = new ScrollableResultsIterator<>(scroll);
+        } else {
+           myResultsIterator = null;
+        }
+
+     }
+
      @Override
      public boolean hasNext() {
         if (myNext == null) {
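retrieveNextIteratorQuery() lets the result iterator fall through from one chunked query to the next without the caller noticing. The control flow is easier to see stripped of the Hibernate wiring; the sketch below substitutes plain Java iterators for ScrollableResults, which is an assumption for illustration only:

import java.util.ArrayDeque;
import java.util.Arrays;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;

public class ChainedResultsSketch {

   private final Deque<List<Long>> myPendingChunks;
   private Iterator<Long> myResultsIterator;

   ChainedResultsSketch(List<List<Long>> theChunkedResults) {
      myPendingChunks = new ArrayDeque<>(theChunkedResults);
      retrieveNextIteratorQuery();
   }

   // Mirrors the pattern above: when the current chunk is exhausted, open the next one
   private void retrieveNextIteratorQuery() {
      myResultsIterator = myPendingChunks.isEmpty() ? null : myPendingChunks.poll().iterator();
   }

   Long next() {
      while (myResultsIterator != null) {
         if (myResultsIterator.hasNext()) {
            return myResultsIterator.next();
         }
         retrieveNextIteratorQuery();
      }
      return null;
   }

   public static void main(String[] args) {
      ChainedResultsSketch sketch = new ChainedResultsSketch(Arrays.asList(Arrays.asList(1L, 2L), Arrays.asList(3L)));
      Long next;
      while ((next = sketch.next()) != null) {
         System.out.println(next); // 1, 2, 3
      }
   }
}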
@@ -0,0 +1,17 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public interface IObservationIndexedCodeCodeableConceptSearchParamDao extends JpaRepository<ObservationIndexedCodeCodeableConceptEntity, Long> {
+   @Query("" +
+      "SELECT t FROM ObservationIndexedCodeCodeableConceptEntity t " +
+      "WHERE t.myCodeableConceptId = :codeableConceptId" +
+      "")
+   ObservationIndexedCodeCodeableConceptEntity findByCodeableConceptId(@Param("codeableConceptId") String theCodeableConceptId);
+
+}
@ -0,0 +1,26 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao.data;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodingEntity;
|
||||||
|
import org.springframework.data.jpa.repository.JpaRepository;
|
||||||
|
import org.springframework.data.jpa.repository.Query;
|
||||||
|
import org.springframework.data.repository.query.Param;
|
||||||
|
import org.springframework.stereotype.Repository;
|
||||||
|
|
||||||
|
@Repository
|
||||||
|
public interface IObservationIndexedCodeCodingSearchParamDao extends JpaRepository<ObservationIndexedCodeCodingEntity, Long> {
|
||||||
|
|
||||||
|
@Query("" +
|
||||||
|
"SELECT t.myCodeableConceptId FROM ObservationIndexedCodeCodingEntity t " +
|
||||||
|
"WHERE t.myCode = :code " +
|
||||||
|
"AND t.mySystem = :system " +
|
||||||
|
"")
|
||||||
|
String findByCodeAndSystem(@Param("code") String theCode, @Param("system") String theSystem);
|
||||||
|
|
||||||
|
|
||||||
|
@Query("" +
|
||||||
|
"SELECT t.myCodeableConceptId FROM ObservationIndexedCodeCodingEntity t " +
|
||||||
|
"WHERE t.myDisplay = :display" +
|
||||||
|
"")
|
||||||
|
String findByDisplay(@Param("display") String theDisplay);
|
||||||
|
|
||||||
|
}
|
|
@@ -0,0 +1,17 @@
+package ca.uhn.fhir.jpa.dao.data;
+
+import ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity;
+import org.springframework.data.jpa.repository.JpaRepository;
+import org.springframework.data.jpa.repository.Query;
+import org.springframework.data.repository.query.Param;
+import org.springframework.stereotype.Repository;
+
+@Repository
+public interface IObservationIndexedSearchParamLastNDao extends JpaRepository<ObservationIndexedSearchParamLastNEntity, Long> {
+   @Query("" +
+      "SELECT t FROM ObservationIndexedSearchParamLastNEntity t " +
+      "WHERE t.myIdentifier = :identifier" +
+      "")
+   ObservationIndexedSearchParamLastNEntity findByIdentifier(@Param("identifier") String theIdentifier);
+
+}
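These three interfaces are plain Spring Data JPA repositories, so callers simply invoke the query methods. A usage sketch follows; the service class and its wiring are hypothetical, and only the repository methods come from the interfaces above:

import ca.uhn.fhir.jpa.dao.data.IObservationIndexedCodeCodingSearchParamDao;
import ca.uhn.fhir.jpa.dao.data.IObservationIndexedSearchParamLastNDao;
import ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class ObservationLastNLookupSketch {

   @Autowired
   private IObservationIndexedSearchParamLastNDao myLastNDao;

   @Autowired
   private IObservationIndexedCodeCodingSearchParamDao myCodingDao;

   // Resolve the indexed LastN entry for an Observation, if one has been persisted
   public ObservationIndexedSearchParamLastNEntity findIndexedObservation(String theObservationIdentifier) {
      return myLastNDao.findByIdentifier(theObservationIdentifier);
   }

   // Look up the CodeableConcept ID previously assigned to a coding, if any
   public String findCodeableConceptId(String theCode, String theSystem) {
      return myCodingDao.findByCodeAndSystem(theCode, theSystem);
   }
}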
@ -0,0 +1,81 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao.dstu3;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2020 University Health Network
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDaoObservation;
|
||||||
|
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
|
||||||
|
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||||
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
|
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||||
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
import org.hl7.fhir.dstu3.model.Observation;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
|
import javax.servlet.http.HttpServletResponse;
|
||||||
|
import java.util.Date;
|
||||||
|
|
||||||
|
public class FhirResourceDaoObservationDstu3 extends BaseHapiFhirResourceDaoObservation<Observation> {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
ObservationLastNIndexPersistSvc myObservationLastNIndexPersistSvc;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public IBundleProvider observationsLastN(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse) {
|
||||||
|
|
||||||
|
updateSearchParamsForLastn(theSearchParameterMap, theRequestDetails);
|
||||||
|
|
||||||
|
return mySearchCoordinatorSvc.registerSearch(this, theSearchParameterMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)), theRequestDetails);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getEffectiveParamName() {
|
||||||
|
return Observation.SP_DATE;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getCodeParamName() {
|
||||||
|
return Observation.SP_CODE;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getSubjectParamName() {
|
||||||
|
return Observation.SP_SUBJECT;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getPatientParamName() {
|
||||||
|
return Observation.SP_PATIENT;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
|
||||||
|
boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
|
||||||
|
return updateObservationEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull,
|
||||||
|
thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate,
|
||||||
|
theCreateNewHistoryEntry);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
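The observationsLastN() DAO method above ultimately backs the Observation/$lastn operation. A client-side sketch of invoking it with HAPI's generic client follows; the server base URL is a placeholder and the exact parameter encodings are assumptions, while the operation name and the max, patient, and category parameter names come from this commit:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.dstu3.model.Bundle;
import org.hl7.fhir.dstu3.model.IntegerType;
import org.hl7.fhir.dstu3.model.Observation;
import org.hl7.fhir.dstu3.model.Parameters;
import org.hl7.fhir.dstu3.model.StringType;

public class LastNClientSketch {

   public static void main(String[] args) {
      // Assumed server address; any HAPI JPA server exposing Observation/$lastn would do
      FhirContext ctx = FhirContext.forDstu3();
      IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

      Parameters inParams = new Parameters();
      inParams.addParameter().setName("max").setValue(new IntegerType(3));
      inParams.addParameter().setName("patient").setValue(new StringType("Patient/123"));
      inParams.addParameter().setName("category").setValue(new StringType("vital-signs"));

      // Invokes [base]/Observation/$lastn and returns a searchset Bundle
      Bundle lastN = client
         .operation()
         .onType(Observation.class)
         .named("$lastn")
         .withParameters(inParams)
         .returnResourceType(Bundle.class)
         .execute();

      System.out.println("Returned " + lastN.getEntry().size() + " observations");
   }
}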
@@ -35,6 +35,7 @@ import javax.persistence.PersistenceContext;
 import javax.persistence.PersistenceContextType;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashSet;
 import java.util.List;
 
 @Service
@@ -67,13 +68,20 @@ public class DaoSearchParamSynchronizer {
    }
 
    private <T extends BaseResourceIndex> void synchronize(ResourceTable theEntity, AddRemoveCount theAddRemoveCount, Collection<T> theNewParams, Collection<T> theExistingParams) {
-      for (T next : theNewParams) {
+      Collection<T> newParams = theNewParams;
+      for (T next : newParams) {
         next.setPartitionId(theEntity.getPartitionId());
         next.calculateHashes();
      }
 
-      List<T> paramsToRemove = subtract(theExistingParams, theNewParams);
-      List<T> paramsToAdd = subtract(theNewParams, theExistingParams);
+      /*
+       * HashCodes may have changed as a result of setting the partition ID, so
+       * create a new set that will reflect the new hashcodes
+       */
+      newParams = new HashSet<>(newParams);
+
+      List<T> paramsToRemove = subtract(theExistingParams, newParams);
+      List<T> paramsToAdd = subtract(newParams, theExistingParams);
      tryToReuseIndexEntities(paramsToRemove, paramsToAdd);
 
      for (T next : paramsToRemove) {
@@ -127,8 +135,12 @@ public class DaoSearchParamSynchronizer {
         return new ArrayList<>();
      }
 
-      ArrayList<T> retVal = new ArrayList<>(theSubtractFrom);
-      retVal.removeAll(theToSubtract);
+      ArrayList<T> retVal = new ArrayList<>();
+      for (T next : theSubtractFrom) {
+         if (!theToSubtract.contains(next)) {
+            retVal.add(next);
+         }
+      }
      return retVal;
    }
 }
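The comment in synchronize() is the whole story: once setPartitionId() has run, the hash codes of the index entities no longer match the buckets they occupy, so set operations silently fail until the set is rebuilt. A standalone sketch of that failure mode, using a simplified stand-in entity rather than the real index classes:

import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

public class HashCodeMutationSketch {

   // Stand-in for an index entity whose hashCode depends on a mutable field,
   // the way the real index rows hash over their partition ID and values.
   static class IndexRow {
      String myValue;
      Integer myPartitionId;

      IndexRow(String theValue) {
         myValue = theValue;
      }

      @Override
      public boolean equals(Object theO) {
         if (!(theO instanceof IndexRow)) {
            return false;
         }
         IndexRow other = (IndexRow) theO;
         return Objects.equals(myValue, other.myValue) && Objects.equals(myPartitionId, other.myPartitionId);
      }

      @Override
      public int hashCode() {
         return Objects.hash(myValue, myPartitionId);
      }
   }

   public static void main(String[] args) {
      Set<IndexRow> newParams = new HashSet<>();
      IndexRow row = new IndexRow("loinc|1234-5");
      newParams.add(row);

      // Mutating a field that feeds hashCode() strands the element in the wrong bucket...
      row.myPartitionId = 42;
      System.out.println(newParams.contains(row)); // false

      // ...so the synchronizer rebuilds the set after setting partition IDs
      Set<IndexRow> rehashed = new HashSet<>(newParams);
      System.out.println(rehashed.contains(row)); // true
   }
}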
@ -0,0 +1,88 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao.r4;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2020 University Health Network
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDaoObservation;
|
||||||
|
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
|
||||||
|
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||||
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
|
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||||
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
import org.hl7.fhir.r4.model.Observation;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
|
import javax.persistence.EntityManager;
|
||||||
|
import javax.persistence.PersistenceContext;
|
||||||
|
import javax.persistence.PersistenceContextType;
|
||||||
|
import javax.servlet.http.HttpServletResponse;
|
||||||
|
import java.util.Date;
|
||||||
|
|
||||||
|
public class FhirResourceDaoObservationR4 extends BaseHapiFhirResourceDaoObservation<Observation> {
|
||||||
|
|
||||||
|
|
||||||
|
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
|
||||||
|
protected EntityManager myEntityManager;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
ObservationLastNIndexPersistSvc myObservationLastNIndexPersistSvc;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public IBundleProvider observationsLastN(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse) {
|
||||||
|
|
||||||
|
updateSearchParamsForLastn(theSearchParameterMap, theRequestDetails);
|
||||||
|
|
||||||
|
return mySearchCoordinatorSvc.registerSearch(this, theSearchParameterMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)), theRequestDetails);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getEffectiveParamName() {
|
||||||
|
return Observation.SP_DATE;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getCodeParamName() {
|
||||||
|
return Observation.SP_CODE;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getSubjectParamName() {
|
||||||
|
return Observation.SP_SUBJECT;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getPatientParamName() {
|
||||||
|
return Observation.SP_PATIENT;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
|
||||||
|
boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
|
||||||
|
return updateObservationEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull,
|
||||||
|
thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate,
|
||||||
|
theCreateNewHistoryEntry);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,81 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao.r5;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2020 University Health Network
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDaoObservation;
|
||||||
|
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
|
||||||
|
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.rest.api.CacheControlDirective;
|
||||||
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
|
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||||
|
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||||
|
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
import org.hl7.fhir.r5.model.Observation;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
|
||||||
|
import javax.servlet.http.HttpServletResponse;
|
||||||
|
import java.util.Date;
|
||||||
|
|
||||||
|
public class FhirResourceDaoObservationR5 extends BaseHapiFhirResourceDaoObservation<Observation> {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
ObservationLastNIndexPersistSvc myObservationLastNIndexPersistSvc;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public IBundleProvider observationsLastN(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse) {
|
||||||
|
|
||||||
|
updateSearchParamsForLastn(theSearchParameterMap, theRequestDetails);
|
||||||
|
|
||||||
|
return mySearchCoordinatorSvc.registerSearch(this, theSearchParameterMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)), theRequestDetails);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getEffectiveParamName() {
|
||||||
|
return Observation.SP_DATE;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getCodeParamName() {
|
||||||
|
return Observation.SP_CODE;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getSubjectParamName() {
|
||||||
|
return Observation.SP_SUBJECT;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected String getPatientParamName() {
|
||||||
|
return Observation.SP_PATIENT;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ResourceTable updateEntity(RequestDetails theRequest, IBaseResource theResource, IBasePersistedResource theEntity, Date theDeletedTimestampOrNull, boolean thePerformIndexing,
|
||||||
|
boolean theUpdateVersion, TransactionDetails theTransactionDetails, boolean theForceUpdate, boolean theCreateNewHistoryEntry) {
|
||||||
|
return updateObservationEntity(theRequest, theResource, theEntity, theDeletedTimestampOrNull,
|
||||||
|
thePerformIndexing, theUpdateVersion, theTransactionDetails, theForceUpdate,
|
||||||
|
theCreateNewHistoryEntry);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@@ -79,6 +79,11 @@ public class PerformanceTracingLoggingInterceptor {
      log("SqlQuery {} failed in {} - Found {} matches", theOutcome.getSearchUuid(), theOutcome.getQueryStopwatch(), theOutcome.getFoundMatchesCount());
   }
 
+   @Hook(value = Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)
+   public void indexSearchQueryComplete(SearchRuntimeDetails theOutcome) {
+      log("Index query for {} completed in {} - Found {} matches", theOutcome.getSearchUuid(), theOutcome.getQueryStopwatch(), theOutcome.getFoundIndexMatchesCount());
+   }
+
   @Hook(value = Pointcut.JPA_PERFTRACE_INFO)
   public void info(StorageProcessingMessage theMessage) {
      log("[INFO] " + theMessage);
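Any interceptor with a matching @Hook method can consume the new JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE pointcut, not just the logging interceptor above. A sketch follows; the registration wiring is an assumption, while the pointcut and the SearchRuntimeDetails accessors come from this commit:

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.interceptor.api.Interceptor;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.model.search.SearchRuntimeDetails;

@Interceptor
public class IndexQueryTimingInterceptor {

   // Called once per completed Lucene/Elasticsearch pass, before the follow-up database query runs
   @Hook(Pointcut.JPA_PERFTRACE_INDEXSEARCH_QUERY_COMPLETE)
   public void indexSearchQueryComplete(SearchRuntimeDetails theOutcome) {
      System.out.println("Index query " + theOutcome.getSearchUuid()
         + " found " + theOutcome.getFoundIndexMatchesCount()
         + " matches in " + theOutcome.getQueryStopwatch());
   }

   // Registration sketch: the IInterceptorService instance would typically be injected from the JPA server context
   public static void register(IInterceptorService theInterceptorService) {
      theInterceptorService.registerInterceptor(new IndexQueryTimingInterceptor());
   }
}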
@ -0,0 +1,98 @@
|
||||||
|
package ca.uhn.fhir.jpa.provider.dstu3;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
|
||||||
|
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.model.api.annotation.Description;
|
||||||
|
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
|
||||||
|
import ca.uhn.fhir.rest.annotation.*;
|
||||||
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
|
import ca.uhn.fhir.rest.api.SortOrderEnum;
|
||||||
|
import ca.uhn.fhir.rest.api.SortSpec;
|
||||||
|
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||||
|
import ca.uhn.fhir.rest.param.*;
|
||||||
|
import org.hl7.fhir.dstu3.model.Observation;
|
||||||
|
import org.hl7.fhir.dstu3.model.UnsignedIntType;
|
||||||
|
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2020 University Health Network
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
public class BaseJpaResourceProviderObservationDstu3 extends JpaResourceProviderDstu3<Observation> {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Observation/$lastn
|
||||||
|
*/
|
||||||
|
@Operation(name = JpaConstants.OPERATION_LASTN, idempotent = true, bundleType = BundleTypeEnum.SEARCHSET)
|
||||||
|
public IBundleProvider observationLastN(
|
||||||
|
|
||||||
|
javax.servlet.http.HttpServletRequest theServletRequest,
|
||||||
|
javax.servlet.http.HttpServletResponse theServletResponse,
|
||||||
|
|
||||||
|
ca.uhn.fhir.rest.api.server.RequestDetails theRequestDetails,
|
||||||
|
|
||||||
|
@Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.")
|
||||||
|
@OperationParam(name = Constants.PARAM_COUNT)
|
||||||
|
UnsignedIntType theCount,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The classification of the type of observation")
|
||||||
|
@OperationParam(name="category")
|
||||||
|
TokenAndListParam theCategory,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The code of the observation type")
|
||||||
|
@OperationParam(name="code")
|
||||||
|
TokenAndListParam theCode,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The subject that the observation is about (if patient)")
|
||||||
|
@OperationParam(name="patient")
|
||||||
|
ReferenceAndListParam thePatient,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The subject that the observation is about")
|
||||||
|
@OperationParam(name="subject" )
|
||||||
|
ReferenceAndListParam theSubject,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The maximum number of observations to return for each observation code")
|
||||||
|
@OperationParam(name = "max", typeName = "integer", min = 0, max = 1)
|
||||||
|
IPrimitiveType<Integer> theMax
|
||||||
|
|
||||||
|
) {
|
||||||
|
startRequest(theServletRequest);
|
||||||
|
try {
|
||||||
|
SearchParameterMap paramMap = new SearchParameterMap();
|
||||||
|
paramMap.add(Observation.SP_CATEGORY, theCategory);
|
||||||
|
paramMap.add(Observation.SP_CODE, theCode);
|
||||||
|
if (thePatient != null) {
|
||||||
|
paramMap.add("patient", thePatient);
|
||||||
|
}
|
||||||
|
if (theSubject != null) {
|
||||||
|
paramMap.add("subject", theSubject);
|
||||||
|
}
|
||||||
|
paramMap.setLastNMax(theMax.getValue());
|
||||||
|
if (theCount != null) {
|
||||||
|
paramMap.setCount(theCount.getValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
return ((IFhirResourceDaoObservation<Observation>) getDao()).observationsLastN(paramMap, theRequestDetails, theServletResponse);
|
||||||
|
} finally {
|
||||||
|
endRequest(theServletRequest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,98 @@
|
||||||
|
package ca.uhn.fhir.jpa.provider.r4;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
|
||||||
|
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.model.api.annotation.Description;
|
||||||
|
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
|
||||||
|
import ca.uhn.fhir.rest.annotation.*;
|
||||||
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
|
import ca.uhn.fhir.rest.api.SortOrderEnum;
|
||||||
|
import ca.uhn.fhir.rest.api.SortSpec;
|
||||||
|
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||||
|
import ca.uhn.fhir.rest.param.*;
|
||||||
|
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||||
|
import org.hl7.fhir.r4.model.*;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2020 University Health Network
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
public class BaseJpaResourceProviderObservationR4 extends JpaResourceProviderR4<Observation> {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Observation/$lastn
|
||||||
|
*/
|
||||||
|
@Operation(name = JpaConstants.OPERATION_LASTN, idempotent = true, bundleType = BundleTypeEnum.SEARCHSET)
|
||||||
|
public IBundleProvider observationLastN(
|
||||||
|
|
||||||
|
javax.servlet.http.HttpServletRequest theServletRequest,
|
||||||
|
javax.servlet.http.HttpServletResponse theServletResponse,
|
||||||
|
|
||||||
|
ca.uhn.fhir.rest.api.server.RequestDetails theRequestDetails,
|
||||||
|
|
||||||
|
@Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.")
|
||||||
|
@OperationParam(name = Constants.PARAM_COUNT)
|
||||||
|
UnsignedIntType theCount,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The classification of the type of observation")
|
||||||
|
@OperationParam(name="category")
|
||||||
|
TokenAndListParam theCategory,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The code of the observation type")
|
||||||
|
@OperationParam(name="code")
|
||||||
|
TokenAndListParam theCode,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The subject that the observation is about (if patient)")
|
||||||
|
@OperationParam(name="patient")
|
||||||
|
ReferenceAndListParam thePatient,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The subject that the observation is about")
|
||||||
|
@OperationParam(name="subject" )
|
||||||
|
ReferenceAndListParam theSubject,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The maximum number of observations to return for each observation code")
|
||||||
|
@OperationParam(name = "max", typeName = "integer", min = 0, max = 1)
|
||||||
|
IPrimitiveType<Integer> theMax
|
||||||
|
|
||||||
|
) {
|
||||||
|
startRequest(theServletRequest);
|
||||||
|
try {
|
||||||
|
SearchParameterMap paramMap = new SearchParameterMap();
|
||||||
|
paramMap.add(Observation.SP_CATEGORY, theCategory);
|
||||||
|
paramMap.add(Observation.SP_CODE, theCode);
|
||||||
|
if (thePatient != null) {
|
||||||
|
paramMap.add(Observation.SP_PATIENT, thePatient);
|
||||||
|
}
|
||||||
|
if (theSubject != null) {
|
||||||
|
paramMap.add(Observation.SP_SUBJECT, theSubject);
|
||||||
|
}
|
||||||
|
paramMap.setLastNMax(theMax.getValue());
|
||||||
|
if (theCount != null) {
|
||||||
|
paramMap.setCount(theCount.getValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
return ((IFhirResourceDaoObservation<Observation>) getDao()).observationsLastN(paramMap, theRequestDetails, theServletResponse);
|
||||||
|
} finally {
|
||||||
|
endRequest(theServletRequest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,98 @@
|
||||||
|
package ca.uhn.fhir.jpa.provider.r5;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
|
||||||
|
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.model.api.annotation.Description;
|
||||||
|
import ca.uhn.fhir.model.valueset.BundleTypeEnum;
|
||||||
|
import ca.uhn.fhir.rest.annotation.*;
|
||||||
|
import ca.uhn.fhir.rest.api.Constants;
|
||||||
|
import ca.uhn.fhir.rest.api.SortOrderEnum;
|
||||||
|
import ca.uhn.fhir.rest.api.SortSpec;
|
||||||
|
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||||
|
import ca.uhn.fhir.rest.param.*;
|
||||||
|
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||||
|
import org.hl7.fhir.r5.model.UnsignedIntType;
|
||||||
|
import org.hl7.fhir.r5.model.Observation;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* HAPI FHIR JPA Server
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2014 - 2020 University Health Network
|
||||||
|
* %%
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
public class BaseJpaResourceProviderObservationR5 extends JpaResourceProviderR5<Observation> {
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Observation/$lastn
|
||||||
|
*/
|
||||||
|
@Operation(name = JpaConstants.OPERATION_LASTN, idempotent = true, bundleType = BundleTypeEnum.SEARCHSET)
|
||||||
|
public IBundleProvider observationLastN(
|
||||||
|
|
||||||
|
javax.servlet.http.HttpServletRequest theServletRequest,
|
||||||
|
javax.servlet.http.HttpServletResponse theServletResponse,
|
||||||
|
|
||||||
|
ca.uhn.fhir.rest.api.server.RequestDetails theRequestDetails,
|
||||||
|
|
||||||
|
@Description(formalDefinition = "Results from this method are returned across multiple pages. This parameter controls the size of those pages.")
|
||||||
|
@OperationParam(name = Constants.PARAM_COUNT)
|
||||||
|
UnsignedIntType theCount,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The classification of the type of observation")
|
||||||
|
@OperationParam(name="category")
|
||||||
|
TokenAndListParam theCategory,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The code of the observation type")
|
||||||
|
@OperationParam(name="code")
|
||||||
|
TokenAndListParam theCode,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The subject that the observation is about (if patient)")
|
||||||
|
@OperationParam(name="patient")
|
||||||
|
ReferenceAndListParam thePatient,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The subject that the observation is about")
|
||||||
|
@OperationParam(name="subject" )
|
||||||
|
ReferenceAndListParam theSubject,
|
||||||
|
|
||||||
|
@Description(shortDefinition="The maximum number of observations to return for each observation code")
|
||||||
|
@OperationParam(name = "max", typeName = "integer", min = 0, max = 1)
|
||||||
|
IPrimitiveType<Integer> theMax
|
||||||
|
|
||||||
|
) {
|
||||||
|
startRequest(theServletRequest);
|
||||||
|
try {
|
||||||
|
SearchParameterMap paramMap = new SearchParameterMap();
|
||||||
|
paramMap.add(Observation.SP_CATEGORY, theCategory);
|
||||||
|
paramMap.add(Observation.SP_CODE, theCode);
|
||||||
|
if (thePatient != null) {
|
||||||
|
paramMap.add(Observation.SP_PATIENT, thePatient);
|
||||||
|
}
|
||||||
|
if (theSubject != null) {
|
||||||
|
paramMap.add(Observation.SP_SUBJECT, theSubject);
|
||||||
|
}
|
||||||
|
paramMap.setLastNMax(theMax.getValue());
|
||||||
|
if (theCount != null) {
|
||||||
|
paramMap.setCount(theCount.getValue());
|
||||||
|
}
|
||||||
|
|
||||||
|
return ((IFhirResourceDaoObservation<Observation>) getDao()).observationsLastN(paramMap, theRequestDetails, theServletResponse);
|
||||||
|
} finally {
|
||||||
|
endRequest(theServletRequest);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@@ -0,0 +1,33 @@
+package ca.uhn.fhir.jpa.search.lastn;
+
+import org.apache.http.Header;
+import org.apache.http.HttpHost;
+import org.apache.http.auth.AuthScope;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CredentialsProvider;
+import org.apache.http.impl.client.BasicCredentialsProvider;
+import org.apache.http.message.BasicHeader;
+import org.shadehapi.elasticsearch.client.RestClient;
+import org.shadehapi.elasticsearch.client.RestClientBuilder;
+import org.shadehapi.elasticsearch.client.RestHighLevelClient;
+
+public class ElasticsearchRestClientFactory {
+
+   static public RestHighLevelClient createElasticsearchHighLevelRestClient(String theHostname, int thePort, String theUsername, String thePassword) {
+      final CredentialsProvider credentialsProvider =
+         new BasicCredentialsProvider();
+      credentialsProvider.setCredentials(AuthScope.ANY,
+         new UsernamePasswordCredentials(theUsername, thePassword));
+
+      RestClientBuilder clientBuilder = RestClient.builder(
+         new HttpHost(theHostname, thePort))
+         .setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
+            .setDefaultCredentialsProvider(credentialsProvider));
+
+      Header[] defaultHeaders = new Header[]{new BasicHeader("Content-Type", "application/json")};
+      clientBuilder.setDefaultHeaders(defaultHeaders);
+
+      return new RestHighLevelClient(clientBuilder);
+
+   }
+}
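A wiring sketch for the Elasticsearch-backed $lastn service that follows; the connection settings are placeholders and the Spring configuration class is hypothetical, while the ElasticsearchSvcImpl constructor signature comes from this commit:

import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class LastNElasticsearchConfigSketch {

   // Placeholder connection settings; in a real deployment these would come from external configuration
   private static final String ELASTICSEARCH_HOST = "localhost";
   private static final int ELASTICSEARCH_PORT = 9200;
   private static final String ELASTICSEARCH_USERNAME = "elastic";
   private static final String ELASTICSEARCH_PASSWORD = "changeme";

   // The $lastn service itself; its constructor builds the REST client via the factory above
   // and creates the observation and code indexes if they are missing.
   @Bean
   public ElasticsearchSvcImpl elasticsearchSvc() {
      return new ElasticsearchSvcImpl(ELASTICSEARCH_HOST, ELASTICSEARCH_PORT, ELASTICSEARCH_USERNAME, ELASTICSEARCH_PASSWORD);
   }
}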
@ -0,0 +1,546 @@
|
||||||
|
package ca.uhn.fhir.jpa.search.lastn;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
|
||||||
|
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
|
||||||
|
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;
|
||||||
|
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||||
|
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||||
|
import ca.uhn.fhir.rest.param.TokenParam;
|
||||||
|
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||||
|
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
|
import org.shadehapi.elasticsearch.action.DocWriteResponse;
|
||||||
|
import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexRequest;
|
||||||
|
import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexResponse;
|
||||||
|
import org.shadehapi.elasticsearch.action.admin.indices.get.GetIndexRequest;
|
||||||
|
import org.shadehapi.elasticsearch.action.index.IndexRequest;
|
||||||
|
import org.shadehapi.elasticsearch.action.index.IndexResponse;
|
||||||
|
import org.shadehapi.elasticsearch.action.search.SearchRequest;
|
||||||
|
import org.shadehapi.elasticsearch.action.search.SearchResponse;
|
||||||
|
import org.shadehapi.elasticsearch.client.RequestOptions;
|
||||||
|
import org.shadehapi.elasticsearch.client.RestHighLevelClient;
|
||||||
|
import org.shadehapi.elasticsearch.common.xcontent.XContentType;
|
||||||
|
import org.shadehapi.elasticsearch.index.query.BoolQueryBuilder;
|
||||||
|
import org.shadehapi.elasticsearch.index.query.QueryBuilders;
|
||||||
|
import org.shadehapi.elasticsearch.index.reindex.DeleteByQueryRequest;
|
||||||
|
import org.shadehapi.elasticsearch.search.SearchHit;
|
||||||
|
import org.shadehapi.elasticsearch.search.SearchHits;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.AggregationBuilder;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.AggregationBuilders;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.Aggregations;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.BucketOrder;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.ParsedComposite;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.ParsedTerms;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.Terms;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.metrics.tophits.ParsedTopHits;
|
||||||
|
import org.shadehapi.elasticsearch.search.aggregations.support.ValueType;
|
||||||
|
import org.shadehapi.elasticsearch.search.builder.SearchSourceBuilder;
|
||||||
|
import org.shadehapi.elasticsearch.search.sort.SortOrder;
|
||||||
|
|
||||||
|
import java.io.BufferedReader;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStreamReader;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.function.Function;
|
||||||
|
|
||||||
|
import static org.apache.commons.lang3.StringUtils.isBlank;
|
||||||
|
|
||||||
|
public class ElasticsearchSvcImpl implements IElasticsearchSvc {
|
||||||
|
|
||||||
|
public static final String OBSERVATION_INDEX = "observation_index";
|
||||||
|
public static final String OBSERVATION_CODE_INDEX = "code_index";
|
||||||
|
public static final String OBSERVATION_DOCUMENT_TYPE = "ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity";
|
||||||
|
public static final String CODE_DOCUMENT_TYPE = "ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity";
|
||||||
|
public static final String OBSERVATION_INDEX_SCHEMA_FILE = "ObservationIndexSchema.json";
|
||||||
|
public static final String OBSERVATION_CODE_INDEX_SCHEMA_FILE = "ObservationCodeIndexSchema.json";
|
||||||
|
|
||||||
|
private final RestHighLevelClient myRestHighLevelClient;
|
||||||
|
|
||||||
|
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||||
|
|
||||||
|
private final String GROUP_BY_SUBJECT = "group_by_subject";
|
||||||
|
private final String GROUP_BY_SYSTEM = "group_by_system";
|
||||||
|
private final String GROUP_BY_CODE = "group_by_code";
|
||||||
|
private final String OBSERVATION_IDENTIFIER_FIELD_NAME = "identifier";
|
||||||
|
|
||||||
|
|
||||||
|
public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) {
|
||||||
|
myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword);
|
||||||
|
|
||||||
|
try {
|
||||||
|
createObservationIndexIfMissing();
|
||||||
|
createObservationCodeIndexIfMissing();
|
||||||
|
} catch (IOException theE) {
|
||||||
|
throw new RuntimeException("Failed to create document index", theE);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private String getIndexSchema(String theSchemaFileName) throws IOException {
|
||||||
|
InputStreamReader input = new InputStreamReader(ElasticsearchSvcImpl.class.getResourceAsStream(theSchemaFileName));
|
||||||
|
BufferedReader reader = new BufferedReader(input);
|
||||||
|
StringBuilder sb = new StringBuilder();
|
||||||
|
String str;
|
||||||
|
while((str = reader.readLine())!= null){
|
||||||
|
sb.append(str);
|
||||||
|
}
|
||||||
|
|
||||||
|
return sb.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
private void createObservationIndexIfMissing() throws IOException {
|
||||||
|
if (indexExists(OBSERVATION_INDEX)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
String observationMapping = getIndexSchema(OBSERVATION_INDEX_SCHEMA_FILE);
|
||||||
|
if (!createIndex(OBSERVATION_INDEX, observationMapping)) {
|
||||||
|
throw new RuntimeException("Failed to create observation index");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void createObservationCodeIndexIfMissing() throws IOException {
|
||||||
|
if (indexExists(OBSERVATION_CODE_INDEX)) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
String observationCodeMapping = getIndexSchema(OBSERVATION_CODE_INDEX_SCHEMA_FILE);
|
||||||
|
if (!createIndex(OBSERVATION_CODE_INDEX, observationCodeMapping)) {
|
||||||
|
throw new RuntimeException("Failed to create observation code index");
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean createIndex(String theIndexName, String theMapping) throws IOException {
|
||||||
|
CreateIndexRequest request = new CreateIndexRequest(theIndexName);
|
||||||
|
request.source(theMapping, XContentType.JSON);
|
||||||
|
CreateIndexResponse createIndexResponse = myRestHighLevelClient.indices().create(request, RequestOptions.DEFAULT);
|
||||||
|
return createIndexResponse.isAcknowledged();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private boolean indexExists(String theIndexName) throws IOException {
|
||||||
|
GetIndexRequest request = new GetIndexRequest();
|
||||||
|
request.indices(theIndexName);
|
||||||
|
return myRestHighLevelClient.indices().exists(request, RequestOptions.DEFAULT);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<String> executeLastN(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, Integer theMaxResultsToFetch) {
|
||||||
|
String[] topHitsInclude = {OBSERVATION_IDENTIFIER_FIELD_NAME};
|
||||||
|
return buildAndExecuteSearch(theSearchParameterMap, theFhirContext, topHitsInclude,
|
||||||
|
ObservationJson::getIdentifier, theMaxResultsToFetch);
|
||||||
|
}
|
||||||
|
|
||||||
|
private <T> List<T> buildAndExecuteSearch(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext,
|
||||||
|
String[] topHitsInclude, Function<ObservationJson,T> setValue, Integer theMaxResultsToFetch) {
|
||||||
|
String patientParamName = LastNParameterHelper.getPatientParamName(theFhirContext);
|
||||||
|
String subjectParamName = LastNParameterHelper.getSubjectParamName(theFhirContext);
|
||||||
|
List<T> searchResults = new ArrayList<>();
|
||||||
|
if (theSearchParameterMap.containsKey(patientParamName)
|
||||||
|
|| theSearchParameterMap.containsKey(subjectParamName)) {
|
||||||
|
for (String subject : getSubjectReferenceCriteria(patientParamName, subjectParamName, theSearchParameterMap)) {
|
||||||
|
if (theMaxResultsToFetch != null && searchResults.size() >= theMaxResultsToFetch) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
SearchRequest myLastNRequest = buildObservationsSearchRequest(subject, theSearchParameterMap, theFhirContext,
|
||||||
|
createObservationSubjectAggregationBuilder(theSearchParameterMap.getLastNMax(), topHitsInclude));
|
||||||
|
try {
|
||||||
|
SearchResponse lastnResponse = executeSearchRequest(myLastNRequest);
|
||||||
|
searchResults.addAll(buildObservationList(lastnResponse, setValue, theSearchParameterMap, theFhirContext,
|
||||||
|
theMaxResultsToFetch));
|
||||||
|
} catch (IOException theE) {
|
||||||
|
throw new InvalidRequestException("Unable to execute LastN request", theE);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
SearchRequest myLastNRequest = buildObservationsSearchRequest(theSearchParameterMap, theFhirContext,
|
||||||
|
createObservationCodeAggregationBuilder(theSearchParameterMap.getLastNMax(), topHitsInclude));
|
||||||
|
try {
|
||||||
|
SearchResponse lastnResponse = executeSearchRequest(myLastNRequest);
|
||||||
|
searchResults.addAll(buildObservationList(lastnResponse, setValue, theSearchParameterMap, theFhirContext,
|
||||||
|
theMaxResultsToFetch));
|
||||||
|
} catch (IOException theE) {
|
||||||
|
throw new InvalidRequestException("Unable to execute LastN request", theE);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return searchResults;
|
||||||
|
}
|
||||||
|
|
||||||
|
   private List<String> getSubjectReferenceCriteria(String thePatientParamName, String theSubjectParamName, SearchParameterMap theSearchParameterMap) {
      List<String> subjectReferenceCriteria = new ArrayList<>();

      List<List<IQueryParameterType>> patientParams = new ArrayList<>();
      if (theSearchParameterMap.get(thePatientParamName) != null) {
         patientParams.addAll(theSearchParameterMap.get(thePatientParamName));
      }
      if (theSearchParameterMap.get(theSubjectParamName) != null) {
         patientParams.addAll(theSearchParameterMap.get(theSubjectParamName));
      }
      for (List<? extends IQueryParameterType> nextSubjectList : patientParams) {
         subjectReferenceCriteria.addAll(getReferenceValues(nextSubjectList));
      }
      return subjectReferenceCriteria;
   }

   private List<String> getReferenceValues(List<? extends IQueryParameterType> referenceParams) {
      List<String> referenceList = new ArrayList<>();

      for (IQueryParameterType nextOr : referenceParams) {

         if (nextOr instanceof ReferenceParam) {
            ReferenceParam ref = (ReferenceParam) nextOr;
            if (isBlank(ref.getChain())) {
               referenceList.add(ref.getValue());
            }
         } else {
            throw new IllegalArgumentException("Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
         }
      }
      return referenceList;
   }
   private CompositeAggregationBuilder createObservationSubjectAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
      CompositeValuesSourceBuilder<?> subjectValuesBuilder = new TermsValuesSourceBuilder("subject").field("subject");
      List<CompositeValuesSourceBuilder<?>> compositeAggSubjectSources = new ArrayList<>();
      compositeAggSubjectSources.add(subjectValuesBuilder);
      CompositeAggregationBuilder compositeAggregationSubjectBuilder = new CompositeAggregationBuilder(GROUP_BY_SUBJECT, compositeAggSubjectSources);
      compositeAggregationSubjectBuilder.subAggregation(createObservationCodeAggregationBuilder(theMaxNumberObservationsPerCode, theTopHitsInclude));
      compositeAggregationSubjectBuilder.size(10000);

      return compositeAggregationSubjectBuilder;
   }

   private TermsAggregationBuilder createObservationCodeAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
      TermsAggregationBuilder observationCodeCodeAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_CODE, ValueType.STRING).field("codeconceptcodingcode");
      observationCodeCodeAggregationBuilder.order(BucketOrder.key(true));
      // Top Hits Aggregation
      observationCodeCodeAggregationBuilder.subAggregation(AggregationBuilders.topHits("most_recent_effective")
         .sort("effectivedtm", SortOrder.DESC)
         .fetchSource(theTopHitsInclude, null).size(theMaxNumberObservationsPerCode));
      observationCodeCodeAggregationBuilder.size(10000);
      TermsAggregationBuilder observationCodeSystemAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_SYSTEM, ValueType.STRING).field("codeconceptcodingsystem");
      observationCodeSystemAggregationBuilder.order(BucketOrder.key(true));
      observationCodeSystemAggregationBuilder.subAggregation(observationCodeCodeAggregationBuilder);
      return observationCodeSystemAggregationBuilder;
   }
   private SearchResponse executeSearchRequest(SearchRequest searchRequest) throws IOException {
      return myRestHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
   }

   private <T> List<T> buildObservationList(SearchResponse theSearchResponse, Function<ObservationJson, T> setValue,
                                            SearchParameterMap theSearchParameterMap, FhirContext theFhirContext,
                                            Integer theMaxResultsToFetch) throws IOException {
      List<T> theObservationList = new ArrayList<>();
      if (theSearchParameterMap.containsKey(LastNParameterHelper.getPatientParamName(theFhirContext))
         || theSearchParameterMap.containsKey(LastNParameterHelper.getSubjectParamName(theFhirContext))) {
         for (ParsedComposite.ParsedBucket subjectBucket : getSubjectBuckets(theSearchResponse)) {
            if (theMaxResultsToFetch != null && theObservationList.size() >= theMaxResultsToFetch) {
               break;
            }
            for (Terms.Bucket observationCodeBucket : getObservationCodeBuckets(subjectBucket)) {
               if (theMaxResultsToFetch != null && theObservationList.size() >= theMaxResultsToFetch) {
                  break;
               }
               for (SearchHit lastNMatch : getLastNMatches(observationCodeBucket)) {
                  if (theMaxResultsToFetch != null && theObservationList.size() >= theMaxResultsToFetch) {
                     break;
                  }
                  String indexedObservation = lastNMatch.getSourceAsString();
                  ObservationJson observationJson = objectMapper.readValue(indexedObservation, ObservationJson.class);
                  theObservationList.add(setValue.apply(observationJson));
               }
            }
         }
      } else {
         for (Terms.Bucket observationCodeBucket : getObservationCodeBuckets(theSearchResponse)) {
            if (theMaxResultsToFetch != null && theObservationList.size() >= theMaxResultsToFetch) {
               break;
            }
            for (SearchHit lastNMatch : getLastNMatches(observationCodeBucket)) {
               if (theMaxResultsToFetch != null && theObservationList.size() >= theMaxResultsToFetch) {
                  break;
               }
               String indexedObservation = lastNMatch.getSourceAsString();
               ObservationJson observationJson = objectMapper.readValue(indexedObservation, ObservationJson.class);
               theObservationList.add(setValue.apply(observationJson));
            }
         }
      }

      return theObservationList;
   }
   private List<ParsedComposite.ParsedBucket> getSubjectBuckets(SearchResponse theSearchResponse) {
      Aggregations responseAggregations = theSearchResponse.getAggregations();
      ParsedComposite aggregatedSubjects = responseAggregations.get(GROUP_BY_SUBJECT);
      return aggregatedSubjects.getBuckets();
   }

   private List<? extends Terms.Bucket> getObservationCodeBuckets(SearchResponse theSearchResponse) {
      Aggregations responseAggregations = theSearchResponse.getAggregations();
      return getObservationCodeBuckets(responseAggregations);
   }

   private List<? extends Terms.Bucket> getObservationCodeBuckets(ParsedComposite.ParsedBucket theSubjectBucket) {
      Aggregations observationCodeSystemAggregations = theSubjectBucket.getAggregations();
      return getObservationCodeBuckets(observationCodeSystemAggregations);
   }

   private List<? extends Terms.Bucket> getObservationCodeBuckets(Aggregations theObservationCodeSystemAggregations) {
      List<Terms.Bucket> retVal = new ArrayList<>();
      ParsedTerms aggregatedObservationCodeSystems = theObservationCodeSystemAggregations.get(GROUP_BY_SYSTEM);
      for (Terms.Bucket observationCodeSystem : aggregatedObservationCodeSystems.getBuckets()) {
         Aggregations observationCodeCodeAggregations = observationCodeSystem.getAggregations();
         ParsedTerms aggregatedObservationCodeCodes = observationCodeCodeAggregations.get(GROUP_BY_CODE);
         retVal.addAll(aggregatedObservationCodeCodes.getBuckets());
      }
      return retVal;
   }

   private SearchHit[] getLastNMatches(Terms.Bucket theObservationCodeBucket) {
      Aggregations topHitObservationCodes = theObservationCodeBucket.getAggregations();
      ParsedTopHits parsedTopHits = topHitObservationCodes.get("most_recent_effective");
      return parsedTopHits.getHits().getHits();
   }
   private SearchRequest buildObservationsSearchRequest(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, AggregationBuilder theAggregationBuilder) {
      SearchRequest searchRequest = new SearchRequest(OBSERVATION_INDEX);
      SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
      // Query
      if (!searchParamsHaveLastNCriteria(theSearchParameterMap, theFhirContext)) {
         searchSourceBuilder.query(QueryBuilders.matchAllQuery());
      } else {
         BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
         addCategoriesCriteria(boolQueryBuilder, theSearchParameterMap, theFhirContext);
         addObservationCodeCriteria(boolQueryBuilder, theSearchParameterMap, theFhirContext);
         searchSourceBuilder.query(boolQueryBuilder);
      }
      searchSourceBuilder.size(0);

      // Aggregation by order codes
      searchSourceBuilder.aggregation(theAggregationBuilder);
      searchRequest.source(searchSourceBuilder);

      return searchRequest;
   }

   private SearchRequest buildObservationsSearchRequest(String theSubjectParam, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext,
                                                        AggregationBuilder theAggregationBuilder) {
      SearchRequest searchRequest = new SearchRequest(OBSERVATION_INDEX);
      SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
      // Query
      BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
      boolQueryBuilder.must(QueryBuilders.termQuery("subject", theSubjectParam));
      addCategoriesCriteria(boolQueryBuilder, theSearchParameterMap, theFhirContext);
      addObservationCodeCriteria(boolQueryBuilder, theSearchParameterMap, theFhirContext);
      searchSourceBuilder.query(boolQueryBuilder);
      searchSourceBuilder.size(0);

      // Aggregation by order codes
      searchSourceBuilder.aggregation(theAggregationBuilder);
      searchRequest.source(searchSourceBuilder);

      return searchRequest;
   }

   private Boolean searchParamsHaveLastNCriteria(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) {
      return theSearchParameterMap != null &&
         (theSearchParameterMap.containsKey(LastNParameterHelper.getPatientParamName(theFhirContext))
            || theSearchParameterMap.containsKey(LastNParameterHelper.getSubjectParamName(theFhirContext))
            || theSearchParameterMap.containsKey(LastNParameterHelper.getCategoryParamName(theFhirContext))
            || theSearchParameterMap.containsKey(LastNParameterHelper.getCodeParamName(theFhirContext)));
   }
   private void addCategoriesCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) {
      String categoryParamName = LastNParameterHelper.getCategoryParamName(theFhirContext);
      if (theSearchParameterMap.containsKey(categoryParamName)) {
         ArrayList<String> codeSystemHashList = new ArrayList<>();
         ArrayList<String> codeOnlyList = new ArrayList<>();
         ArrayList<String> systemOnlyList = new ArrayList<>();
         ArrayList<String> textOnlyList = new ArrayList<>();
         List<List<IQueryParameterType>> andOrParams = theSearchParameterMap.get(categoryParamName);
         for (List<? extends IQueryParameterType> nextAnd : andOrParams) {
            codeSystemHashList.addAll(getCodingCodeSystemValues(nextAnd));
            codeOnlyList.addAll(getCodingCodeOnlyValues(nextAnd));
            systemOnlyList.addAll(getCodingSystemOnlyValues(nextAnd));
            textOnlyList.addAll(getCodingTextOnlyValues(nextAnd));
         }
         if (codeSystemHashList.size() > 0) {
            theBoolQueryBuilder.must(QueryBuilders.termsQuery("categoryconceptcodingcode_system_hash", codeSystemHashList));
         }
         if (codeOnlyList.size() > 0) {
            theBoolQueryBuilder.must(QueryBuilders.termsQuery("categoryconceptcodingcode", codeOnlyList));
         }
         if (systemOnlyList.size() > 0) {
            theBoolQueryBuilder.must(QueryBuilders.termsQuery("categoryconceptcodingsystem", systemOnlyList));
         }
         if (textOnlyList.size() > 0) {
            BoolQueryBuilder myTextBoolQueryBuilder = QueryBuilders.boolQuery();
            for (String textOnlyParam : textOnlyList) {
               myTextBoolQueryBuilder.should(QueryBuilders.matchPhraseQuery("categoryconceptcodingdisplay", textOnlyParam));
               myTextBoolQueryBuilder.should(QueryBuilders.matchPhraseQuery("categoryconcepttext", textOnlyParam));
            }
            theBoolQueryBuilder.must(myTextBoolQueryBuilder);
         }
      }

   }
   private List<String> getCodingCodeSystemValues(List<? extends IQueryParameterType> codeParams) {
      ArrayList<String> codeSystemHashList = new ArrayList<>();
      for (IQueryParameterType nextOr : codeParams) {
         if (nextOr instanceof TokenParam) {
            TokenParam ref = (TokenParam) nextOr;
            if (ref.getSystem() != null && ref.getValue() != null) {
               codeSystemHashList.add(String.valueOf(CodeSystemHash.hashCodeSystem(ref.getSystem(), ref.getValue())));
            }
         } else {
            throw new IllegalArgumentException("Invalid token type (expecting TokenParam): " + nextOr.getClass());
         }
      }
      return codeSystemHashList;
   }

   private List<String> getCodingCodeOnlyValues(List<? extends IQueryParameterType> codeParams) {
      ArrayList<String> codeOnlyList = new ArrayList<>();
      for (IQueryParameterType nextOr : codeParams) {

         if (nextOr instanceof TokenParam) {
            TokenParam ref = (TokenParam) nextOr;
            if (ref.getValue() != null && ref.getSystem() == null && !ref.isText()) {
               codeOnlyList.add(ref.getValue());
            }
         } else {
            throw new IllegalArgumentException("Invalid token type (expecting TokenParam): " + nextOr.getClass());
         }
      }
      return codeOnlyList;
   }

   private List<String> getCodingSystemOnlyValues(List<? extends IQueryParameterType> codeParams) {
      ArrayList<String> systemOnlyList = new ArrayList<>();
      for (IQueryParameterType nextOr : codeParams) {

         if (nextOr instanceof TokenParam) {
            TokenParam ref = (TokenParam) nextOr;
            if (ref.getValue() == null && ref.getSystem() != null) {
               systemOnlyList.add(ref.getSystem());
            }
         } else {
            throw new IllegalArgumentException("Invalid token type (expecting TokenParam): " + nextOr.getClass());
         }
      }
      return systemOnlyList;
   }

   private List<String> getCodingTextOnlyValues(List<? extends IQueryParameterType> codeParams) {
      ArrayList<String> textOnlyList = new ArrayList<>();
      for (IQueryParameterType nextOr : codeParams) {

         if (nextOr instanceof TokenParam) {
            TokenParam ref = (TokenParam) nextOr;
            if (ref.isText() && ref.getValue() != null) {
               textOnlyList.add(ref.getValue());
            }
         } else {
            throw new IllegalArgumentException("Invalid token type (expecting TokenParam): " + nextOr.getClass());
         }
      }
      return textOnlyList;
   }
   private void addObservationCodeCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) {
      String codeParamName = LastNParameterHelper.getCodeParamName(theFhirContext);
      if (theSearchParameterMap.containsKey(codeParamName)) {
         ArrayList<String> codeSystemHashList = new ArrayList<>();
         ArrayList<String> codeOnlyList = new ArrayList<>();
         ArrayList<String> systemOnlyList = new ArrayList<>();
         ArrayList<String> textOnlyList = new ArrayList<>();
         List<List<IQueryParameterType>> andOrParams = theSearchParameterMap.get(codeParamName);
         for (List<? extends IQueryParameterType> nextAnd : andOrParams) {
            codeSystemHashList.addAll(getCodingCodeSystemValues(nextAnd));
            codeOnlyList.addAll(getCodingCodeOnlyValues(nextAnd));
            systemOnlyList.addAll(getCodingSystemOnlyValues(nextAnd));
            textOnlyList.addAll(getCodingTextOnlyValues(nextAnd));
         }
         if (codeSystemHashList.size() > 0) {
            theBoolQueryBuilder.must(QueryBuilders.termsQuery("codeconceptcodingcode_system_hash", codeSystemHashList));
         }
         if (codeOnlyList.size() > 0) {
            theBoolQueryBuilder.must(QueryBuilders.termsQuery("codeconceptcodingcode", codeOnlyList));
         }
         if (systemOnlyList.size() > 0) {
            theBoolQueryBuilder.must(QueryBuilders.termsQuery("codeconceptcodingsystem", systemOnlyList));
         }
         if (textOnlyList.size() > 0) {
            BoolQueryBuilder myTextBoolQueryBuilder = QueryBuilders.boolQuery();
            for (String textOnlyParam : textOnlyList) {
               myTextBoolQueryBuilder.should(QueryBuilders.matchPhraseQuery("codeconceptcodingdisplay", textOnlyParam));
               myTextBoolQueryBuilder.should(QueryBuilders.matchPhraseQuery("codeconcepttext", textOnlyParam));
            }
            theBoolQueryBuilder.must(myTextBoolQueryBuilder);
         }
      }

   }
   @VisibleForTesting
   List<ObservationJson> executeLastNWithAllFields(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) {
      return buildAndExecuteSearch(theSearchParameterMap, theFhirContext, null, t -> t, 100);
   }

   @VisibleForTesting
   List<CodeJson> queryAllIndexedObservationCodes() throws IOException {
      SearchRequest codeSearchRequest = new SearchRequest(OBSERVATION_CODE_INDEX);
      SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
      // Query
      searchSourceBuilder.query(QueryBuilders.matchAllQuery());
      searchSourceBuilder.size(1000);
      codeSearchRequest.source(searchSourceBuilder);
      SearchResponse codeSearchResponse = executeSearchRequest(codeSearchRequest);
      return buildCodeResult(codeSearchResponse);
   }

   private List<CodeJson> buildCodeResult(SearchResponse theSearchResponse) throws JsonProcessingException {
      SearchHits codeHits = theSearchResponse.getHits();
      List<CodeJson> codes = new ArrayList<>();
      for (SearchHit codeHit : codeHits) {
         CodeJson code = objectMapper.readValue(codeHit.getSourceAsString(), CodeJson.class);
         codes.add(code);
      }
      return codes;
   }

   @VisibleForTesting
   boolean performIndex(String theIndexName, String theDocumentId, String theIndexDocument, String theDocumentType) throws IOException {
      IndexResponse indexResponse = myRestHighLevelClient.index(createIndexRequest(theIndexName, theDocumentId, theIndexDocument, theDocumentType),
         RequestOptions.DEFAULT);

      return (indexResponse.getResult() == DocWriteResponse.Result.CREATED) || (indexResponse.getResult() == DocWriteResponse.Result.UPDATED);
   }

   private IndexRequest createIndexRequest(String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) {
      IndexRequest request = new IndexRequest(theIndexName);
      request.id(theDocumentId);
      request.type(theDocumentType);

      request.source(theObservationDocument, XContentType.JSON);
      return request;
   }

   @VisibleForTesting
   void deleteAllDocuments(String theIndexName) throws IOException {
      DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(theIndexName);
      deleteByQueryRequest.setQuery(QueryBuilders.matchAllQuery());
      myRestHighLevelClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT);
   }

}
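As a quick orientation for the indexing helpers above, a minimal test-side sketch follows; the service variable, index name and document type are illustrative assumptions rather than values taken from this change, and exception handling is omitted:

   // Serialize an ObservationJson (see the json package later in this diff) and index it via performIndex().
   ObservationJson observation = new ObservationJson();
   observation.setIdentifier("obs-1");        // assumed identifier
   observation.setSubject("Patient/123");     // assumed subject reference
   observation.setEffectiveDtm(new Date());
   String document = new ObjectMapper().writeValueAsString(observation);
   boolean indexed = elasticsearchSvc.performIndex("observation_index", "obs-1", document, "_doc");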
@@ -0,0 +1,18 @@
package ca.uhn.fhir.jpa.search.lastn;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;

import java.util.List;

public interface IElasticsearchSvc {

	/**
	 * Returns identifiers for the most recent N observations that meet the specified criteria.
	 * @param theSearchParameterMap SearchParameterMap containing search parameters used for filtering the last N observations. Supported parameters include Subject, Patient, Code, Category and Max (the parameter used to determine N).
	 * @param theFhirContext Current FhirContext.
	 * @param theMaxResultsToFetch The maximum number of results to return for the purpose of paging.
	 * @return The identifiers of the matching observations.
	 */
	List<String> executeLastN(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, Integer theMaxResultsToFetch);
}
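A minimal caller-side sketch of the interface above; the parameter names mirror the implementation, while the bean variable, the R4 context and the setLastNMax setter (the implementation only shows getLastNMax()) are assumptions:

   // Ask for up to the 3 most recent Observations per code for a single patient.
   SearchParameterMap map = new SearchParameterMap();
   map.add("patient", new ReferenceParam("Patient/123"));
   map.add("code", new TokenParam("http://loinc.org", "8867-4"));
   map.setLastNMax(3); // setter assumed to mirror the getLastNMax() call used by the implementation
   List<String> observationIdentifiers = myElasticsearchSvc.executeLastN(map, FhirContext.forR4(), 100);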
@ -0,0 +1,82 @@
|
||||||
|
package ca.uhn.fhir.jpa.search.lastn.json;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* Smile CDR - CDR
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2016 - 2019 Simpatico Intelligent Systems Inc
|
||||||
|
* %%
|
||||||
|
* All rights reserved.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonAutoDetect;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||||
|
import org.hl7.fhir.r4.model.Coding;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||||
|
@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
|
||||||
|
public class CodeJson {
|
||||||
|
|
||||||
|
@JsonProperty(value = "codeable_concept_id", required = false)
|
||||||
|
private String myCodeableConceptId;
|
||||||
|
|
||||||
|
@JsonProperty(value = "text", required = false)
|
||||||
|
private String myCodeableConceptText;
|
||||||
|
|
||||||
|
@JsonProperty(value = "codingcode", required = false)
|
||||||
|
private List<String> myCoding_code = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "codingcode_system_hash", required = true)
|
||||||
|
private List<String> myCoding_code_system_hash = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "codingdisplay", required = false)
|
||||||
|
private List<String> myCoding_display = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "codingsystem", required = false)
|
||||||
|
private List<String> myCoding_system = new ArrayList<>();
|
||||||
|
|
||||||
|
public CodeJson(){
|
||||||
|
}
|
||||||
|
|
||||||
|
public CodeJson(CodeableConcept theCodeableConcept, String theCodeableConceptId) {
|
||||||
|
myCodeableConceptText = theCodeableConcept.getText();
|
||||||
|
myCodeableConceptId = theCodeableConceptId;
|
||||||
|
for (Coding theCoding : theCodeableConcept.getCoding()) {
|
||||||
|
myCoding_code.add(theCoding.getCode());
|
||||||
|
myCoding_system.add(theCoding.getSystem());
|
||||||
|
myCoding_display.add(theCoding.getDisplay());
|
||||||
|
myCoding_code_system_hash.add(String.valueOf(CodeSystemHash.hashCodeSystem(theCoding.getSystem(), theCoding.getCode())));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCodeableConceptId() {
|
||||||
|
return myCodeableConceptId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCodeableConceptText() {
|
||||||
|
return myCodeableConceptText;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<String> getCoding_code() {
|
||||||
|
return myCoding_code;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<String> getCoding_code_system_hash() {
|
||||||
|
return myCoding_code_system_hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<String> getCoding_display() {
|
||||||
|
return myCoding_display;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<String> getCoding_system() {
|
||||||
|
return myCoding_system;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,176 @@
|
||||||
|
package ca.uhn.fhir.jpa.search.lastn.json;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* #%L
|
||||||
|
* Smile CDR - CDR
|
||||||
|
* %%
|
||||||
|
* Copyright (C) 2016 - 2019 Simpatico Intelligent Systems Inc
|
||||||
|
* %%
|
||||||
|
* All rights reserved.
|
||||||
|
* #L%
|
||||||
|
*/
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonAutoDetect;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonProperty;
|
||||||
|
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||||
|
import org.hl7.fhir.r4.model.Coding;
|
||||||
|
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
@JsonInclude(JsonInclude.Include.NON_NULL)
|
||||||
|
@JsonAutoDetect(creatorVisibility = JsonAutoDetect.Visibility.NONE, fieldVisibility = JsonAutoDetect.Visibility.NONE, getterVisibility = JsonAutoDetect.Visibility.NONE, isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
|
||||||
|
public class ObservationJson {
|
||||||
|
|
||||||
|
@JsonProperty(value = "identifier", required = true)
|
||||||
|
private String myIdentifier;
|
||||||
|
|
||||||
|
@JsonProperty(value = "subject", required = true)
|
||||||
|
private String mySubject;
|
||||||
|
|
||||||
|
@JsonProperty(value = "categoryconcepttext", required = false)
|
||||||
|
private List<String> myCategory_concept_text = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "categoryconceptcodingcode", required = false)
|
||||||
|
private List<List<String>> myCategory_coding_code = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "categoryconceptcodingcode_system_hash", required = false)
|
||||||
|
private List<List<String>> myCategory_coding_code_system_hash = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "categoryconceptcodingdisplay", required = false)
|
||||||
|
private List<List<String>> myCategory_coding_display = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "categoryconceptcodingsystem", required = false)
|
||||||
|
private List<List<String>> myCategory_coding_system = new ArrayList<>();
|
||||||
|
|
||||||
|
@JsonProperty(value = "codeconceptid", required = false)
|
||||||
|
private String myCode_concept_id;
|
||||||
|
|
||||||
|
@JsonProperty(value = "codeconcepttext", required = false)
|
||||||
|
private String myCode_concept_text;
|
||||||
|
|
||||||
|
@JsonProperty(value = "codeconceptcodingcode", required = false)
|
||||||
|
private String myCode_coding_code;
|
||||||
|
|
||||||
|
@JsonProperty(value = "codeconceptcodingcode_system_hash", required = false)
|
||||||
|
private String myCode_coding_code_system_hash;
|
||||||
|
|
||||||
|
@JsonProperty(value = "codeconceptcodingdisplay", required = false)
|
||||||
|
private String myCode_coding_display;
|
||||||
|
|
||||||
|
@JsonProperty(value = "codeconceptcodingsystem", required = false)
|
||||||
|
private String myCode_coding_system;
|
||||||
|
|
||||||
|
@JsonProperty(value = "effectivedtm", required = true)
|
||||||
|
private Date myEffectiveDtm;
|
||||||
|
|
||||||
|
public ObservationJson() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setIdentifier(String theIdentifier) {
|
||||||
|
myIdentifier = theIdentifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setSubject(String theSubject) {
|
||||||
|
mySubject = theSubject;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCategories(List<CodeableConcept> theCategories) {
|
||||||
|
for (CodeableConcept theConcept : theCategories) {
|
||||||
|
myCategory_concept_text.add(theConcept.getText());
|
||||||
|
List<String> coding_code_system_hashes = new ArrayList<>();
|
||||||
|
List<String> coding_codes = new ArrayList<>();
|
||||||
|
List<String> coding_displays = new ArrayList<>();
|
||||||
|
List<String> coding_systems = new ArrayList<>();
|
||||||
|
for (Coding theCategoryCoding : theConcept.getCoding()) {
|
||||||
|
coding_code_system_hashes.add(String.valueOf(CodeSystemHash.hashCodeSystem(theCategoryCoding.getSystem(), theCategoryCoding.getCode())));
|
||||||
|
coding_codes.add(theCategoryCoding.getCode());
|
||||||
|
coding_displays.add(theCategoryCoding.getDisplay());
|
||||||
|
coding_systems.add(theCategoryCoding.getSystem());
|
||||||
|
}
|
||||||
|
myCategory_coding_code_system_hash.add(coding_code_system_hashes);
|
||||||
|
myCategory_coding_code.add(coding_codes);
|
||||||
|
myCategory_coding_display.add(coding_displays);
|
||||||
|
myCategory_coding_system.add(coding_systems);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<String> getCategory_concept_text() {
|
||||||
|
return myCategory_concept_text;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<List<String>> getCategory_coding_code_system_hash() {
|
||||||
|
return myCategory_coding_code_system_hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<List<String>> getCategory_coding_code() {
|
||||||
|
return myCategory_coding_code;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<List<String>> getCategory_coding_display() {
|
||||||
|
return myCategory_coding_display;
|
||||||
|
}
|
||||||
|
|
||||||
|
public List<List<String>> getCategory_coding_system() {
|
||||||
|
return myCategory_coding_system;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCode(CodeableConcept theCode) {
|
||||||
|
myCode_concept_text = theCode.getText();
|
||||||
|
for (Coding theCodeCoding : theCode.getCoding()) {
|
||||||
|
myCode_coding_code_system_hash = String.valueOf(CodeSystemHash.hashCodeSystem(theCodeCoding.getSystem(), theCodeCoding.getCode()));
|
||||||
|
myCode_coding_code = theCodeCoding.getCode();
|
||||||
|
myCode_coding_display = theCodeCoding.getDisplay();
|
||||||
|
myCode_coding_system = theCodeCoding.getSystem();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCode_concept_text() {
|
||||||
|
return myCode_concept_text;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCode_coding_code_system_hash() {
|
||||||
|
return myCode_coding_code_system_hash;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCode_coding_code() {
|
||||||
|
return myCode_coding_code;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCode_coding_display() {
|
||||||
|
return myCode_coding_display;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCode_coding_system() {
|
||||||
|
return myCode_coding_system;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCode_concept_id(String theCodeId) {
|
||||||
|
myCode_concept_id = theCodeId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCode_concept_id() {
|
||||||
|
return myCode_concept_id;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setEffectiveDtm(Date theEffectiveDtm) {
|
||||||
|
myEffectiveDtm = theEffectiveDtm;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date getEffectiveDtm() {
|
||||||
|
return myEffectiveDtm;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getSubject() {
|
||||||
|
return mySubject;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getIdentifier() {
|
||||||
|
return myIdentifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@@ -34,8 +34,8 @@ import java.util.function.Consumer;
 public class QueryChunker<T> {
 
 	public void chunk(List<T> theInput, Consumer<List<T>> theBatchConsumer) {
-		for (int i = 0; i < theInput.size(); i += SearchBuilder.MAXIMUM_PAGE_SIZE) {
-			int to = i + SearchBuilder.MAXIMUM_PAGE_SIZE;
+		for (int i = 0; i < theInput.size(); i += SearchBuilder.getMaximumPageSize()) {
+			int to = i + SearchBuilder.getMaximumPageSize();
 			to = Math.min(to, theInput.size());
 			List<T> batch = theInput.subList(i, to);
 			theBatchConsumer.accept(batch);
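For context, a minimal usage sketch of the chunker shown above; the pid list and the processBatch callback are hypothetical:

   // Hand a long list of resource PIDs to a consumer in batches capped at SearchBuilder.getMaximumPageSize().
   new QueryChunker<Long>().chunk(allPids, batch -> processBatch(batch));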
@@ -131,13 +131,15 @@ public class TestUtil {
 			OneToOne oneToOne = nextField.getAnnotation(OneToOne.class);
 			boolean isOtherSideOfOneToManyMapping = oneToMany != null && isNotBlank(oneToMany.mappedBy());
 			boolean isOtherSideOfOneToOneMapping = oneToOne != null && isNotBlank(oneToOne.mappedBy());
+			boolean isField = nextField.getAnnotation(org.hibernate.search.annotations.Field.class) != null;
 			Validate.isTrue(
 				hasEmbedded ||
 				hasColumn ||
 				hasJoinColumn ||
 				isOtherSideOfOneToManyMapping ||
 				isOtherSideOfOneToOneMapping ||
-				hasEmbeddedId, "Non-transient has no @Column or @JoinColumn or @EmbeddedId: " + nextField);
+				hasEmbeddedId ||
+				isField, "Non-transient has no @Column or @JoinColumn or @EmbeddedId: " + nextField);
 		}
@@ -0,0 +1,26 @@
{
  "mappings" : {
    "ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity" : {
      "properties" : {
        "codeable_concept_id" : {
          "type" : "keyword"
        },
        "codingcode" : {
          "type" : "keyword"
        },
        "codingcode_system_hash" : {
          "type" : "keyword"
        },
        "codingdisplay" : {
          "type" : "keyword"
        },
        "codingsystem" : {
          "type" : "keyword"
        },
        "text" : {
          "type" : "keyword"
        }
      }
    }
  }
}
@@ -0,0 +1,50 @@
{
  "mappings" : {
    "ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity" : {
      "properties" : {
        "codeconceptid" : {
          "type" : "keyword"
        },
        "codeconcepttext" : {
          "type" : "text"
        },
        "codeconceptcodingcode" : {
          "type" : "keyword"
        },
        "codeconceptcodingsystem" : {
          "type" : "keyword"
        },
        "codeconceptcodingcode_system_hash" : {
          "type" : "keyword"
        },
        "codeconceptcodingdisplay" : {
          "type" : "keyword"
        },
        "categoryconcepttext" : {
          "type" : "keyword"
        },
        "categoryconceptcodingcode" : {
          "type" : "keyword"
        },
        "categoryconceptcodingsystem" : {
          "type" : "keyword"
        },
        "categoryconceptcodingcode_system_hash" : {
          "type" : "keyword"
        },
        "categoryconceptcodingdisplay" : {
          "type" : "keyword"
        },
        "effectivedtm" : {
          "type" : "date"
        },
        "identifier" : {
          "type" : "keyword"
        },
        "subject" : {
          "type" : "keyword"
        }
      }
    }
  }
}
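A sketch of how a mapping like the one above might be applied when the index is first created, mirroring the private createIndex() helper earlier in this change; the classpath resource name, index name and client variable are assumptions for illustration:

   // Load the mapping JSON and create the index through the REST high level client (names assumed).
   String mapping = IOUtils.toString(
      ElasticsearchSvcImpl.class.getResourceAsStream("/ObservationIndexSchema.json"), StandardCharsets.UTF_8);
   CreateIndexRequest request = new CreateIndexRequest("observation_index");
   request.source(mapping, XContentType.JSON);
   boolean acknowledged = restHighLevelClient.indices().create(request, RequestOptions.DEFAULT).isAcknowledged();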
@@ -111,9 +111,7 @@ public class TestR4Config extends BaseJavaConfigR4 {
 		SLF4JLogLevel level = SLF4JLogLevel.INFO;
 		DataSource dataSource = ProxyDataSourceBuilder
 			.create(retVal)
-			// .logQueryBySlf4j(level, "SQL")
 			.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
-			// .countQuery(new ThreadQueryCountHolder())
 			.beforeQuery(new BlockLargeNumbersOfParamsListener())
 			.afterQuery(captureQueriesListener())
 			.afterQuery(new CurrentThreadCaptureQueriesListener())
@@ -22,6 +22,10 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
 	private static final Logger ourLog = LoggerFactory.getLogger(TestR4ConfigWithElasticSearch.class);
 	private static final String ELASTIC_VERSION = "6.5.4";
+	protected final String elasticsearchHost = "localhost";
+	protected final String elasticsearchUserId = "";
+	protected final String elasticsearchPassword = "";
+
 
 
 	@Override
 	@Bean
@@ -38,9 +42,9 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
 			.setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy.CREATE)
 			.setIndexManagementWaitTimeoutMillis(10000)
 			.setRequiredIndexStatus(ElasticsearchIndexStatus.YELLOW)
-			.setRestUrl("http://localhost:" + httpPort)
-			.setUsername("")
-			.setPassword("")
+			.setRestUrl("http://"+ elasticsearchHost + ":" + httpPort)
+			.setUsername(elasticsearchUserId)
+			.setPassword(elasticsearchPassword)
 			.apply(retVal);
 
 		return retVal;
@@ -65,7 +69,6 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
 		return embeddedElastic;
 	}
 
-
 	@PreDestroy
 	public void stop() {
 		embeddedElasticSearch().stop();
@@ -0,0 +1,16 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElasticSearch {

	@Bean()
	public ElasticsearchSvcImpl myElasticsearchSvc() {
		int elasticsearchPort = embeddedElasticSearch().getHttpPort();
		return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
	}

}
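A sketch of how a test might import this configuration and autowire the service bean it exposes; the test class name is hypothetical:

   // Hypothetical integration test wiring against the embedded Elasticsearch config above.
   @ContextConfiguration(classes = {TestR4ConfigWithElasticsearchClient.class})
   public class ObservationLastNElasticsearchIT {
      @Autowired
      private ElasticsearchSvcImpl myElasticsearchSvc;
   }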
@ -432,6 +432,145 @@ public class FhirResourceDaoDstu2SearchNoFtTest extends BaseJpaDstu2Test {
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testDatePeriodParamEndOnly() {
|
||||||
|
{
|
||||||
|
Encounter enc = new Encounter();
|
||||||
|
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("02");
|
||||||
|
enc.getPeriod().getEndElement().setValueAsString("2001-01-02");
|
||||||
|
myEncounterDao.create(enc, mySrd);
|
||||||
|
}
|
||||||
|
SearchParameterMap params;
|
||||||
|
List<Encounter> encs;
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
||||||
|
// encs = toList(ourEncounterDao.search(params));
|
||||||
|
// assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(0, encs.size());
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testDatePeriodParamStartAndEnd() {
|
||||||
|
{
|
||||||
|
Encounter enc = new Encounter();
|
||||||
|
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("03");
|
||||||
|
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
|
||||||
|
enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
|
||||||
|
myEncounterDao.create(enc, mySrd);
|
||||||
|
}
|
||||||
|
|
||||||
|
SearchParameterMap params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
||||||
|
List<Encounter> encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-02", "2001-01-06"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-05"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(0, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-05", null));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(0, encs.size());
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testDatePeriodParamStartOnly() {
|
||||||
|
{
|
||||||
|
Encounter enc = new Encounter();
|
||||||
|
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("01");
|
||||||
|
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
|
||||||
|
myEncounterDao.create(enc, mySrd);
|
||||||
|
}
|
||||||
|
|
||||||
|
SearchParameterMap params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
||||||
|
List<Encounter> encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(0, encs.size());
|
||||||
|
|
||||||
|
params = new SearchParameterMap();
|
||||||
|
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
|
||||||
|
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
||||||
|
encs = toList(myEncounterDao.search(params));
|
||||||
|
assertEquals(1, encs.size());
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testSearchCompositeParam() {
|
public void testSearchCompositeParam() {
|
||||||
Observation o1 = new Observation();
|
Observation o1 = new Observation();
|
||||||
|
|
|
@ -584,143 +584,6 @@ public class FhirResourceDaoDstu2Test extends BaseJpaDstu2Test {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testDatePeriodParamEndOnly() {
|
|
||||||
{
|
|
||||||
Encounter enc = new Encounter();
|
|
||||||
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("02");
|
|
||||||
enc.getPeriod().getEndElement().setValueAsString("2001-01-02");
|
|
||||||
myEncounterDao.create(enc, mySrd);
|
|
||||||
}
|
|
||||||
SearchParameterMap params;
|
|
||||||
List<Encounter> encs;
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
|
||||||
// encs = toList(ourEncounterDao.search(params));
|
|
||||||
// assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testDatePeriodParamStartAndEnd() {
|
|
||||||
{
|
|
||||||
Encounter enc = new Encounter();
|
|
||||||
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("03");
|
|
||||||
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
|
|
||||||
enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
|
|
||||||
myEncounterDao.create(enc, mySrd);
|
|
||||||
}
|
|
||||||
|
|
||||||
SearchParameterMap params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
|
||||||
List<Encounter> encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-02", "2001-01-06"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-05"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-05", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testDatePeriodParamStartOnly() {
|
|
||||||
{
|
|
||||||
Encounter enc = new Encounter();
|
|
||||||
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("01");
|
|
||||||
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
|
|
||||||
myEncounterDao.create(enc, mySrd);
|
|
||||||
}
|
|
||||||
|
|
||||||
SearchParameterMap params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
|
||||||
List<Encounter> encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new IdentifierDt("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testDeleteFailsIfIncomingLinks() {
|
public void testDeleteFailsIfIncomingLinks() {
|
||||||
|
|
|
@@ -959,6 +959,145 @@ public class FhirResourceDaoDstu3SearchNoFtTest extends BaseJpaDstu3Test {
}

@Test
public void testDatePeriodParamEndOnly() {
{
Encounter enc = new Encounter();
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("02");
enc.getPeriod().getEndElement().setValueAsString("2001-01-02");
myEncounterDao.create(enc, mySrd);
}
SearchParameterMap params;
List<Encounter> encs;

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
// encs = toList(ourEncounterDao.search(params));
// assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

}

@Test
public void testDatePeriodParamStartAndEnd() {
{
Encounter enc = new Encounter();
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("03");
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
myEncounterDao.create(enc, mySrd);
}

SearchParameterMap params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
List<Encounter> encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-02", "2001-01-06"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-05"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-05", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

}

@Test
public void testDatePeriodParamStartOnly() {
{
Encounter enc = new Encounter();
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("01");
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
myEncounterDao.create(enc, mySrd);
}

SearchParameterMap params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
List<Encounter> encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

}

/**
* #222
*/
@@ -870,143 +870,6 @@ public class FhirResourceDaoDstu3Test extends BaseJpaDstu3Test {
}
}

@Test
public void testDatePeriodParamEndOnly() {
{
Encounter enc = new Encounter();
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("02");
enc.getPeriod().getEndElement().setValueAsString("2001-01-02");
myEncounterDao.create(enc, mySrd);
}
SearchParameterMap params;
List<Encounter> encs;

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
// encs = toList(ourEncounterDao.search(params));
// assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

}

@Test
public void testDatePeriodParamStartAndEnd() {
{
Encounter enc = new Encounter();
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("03");
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
myEncounterDao.create(enc, mySrd);
}

SearchParameterMap params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
List<Encounter> encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-02", "2001-01-06"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-05"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-05", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

}

@Test
public void testDatePeriodParamStartOnly() {
{
Encounter enc = new Encounter();
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("01");
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
myEncounterDao.create(enc, mySrd);
}

SearchParameterMap params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
List<Encounter> encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(1, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

params = new SearchParameterMap();
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
encs = toList(myEncounterDao.search(params));
assertEquals(0, encs.size());

}

@Test
public void testDeleteFailsIfIncomingLinks() {
@@ -0,0 +1,543 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoPatient;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticsearchClient;
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
import ca.uhn.fhir.jpa.dao.SearchBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.ReferenceAndListParam;
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenAndListParam;
import ca.uhn.fhir.rest.param.TokenOrListParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.DateTimeType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.transaction.PlatformTransactionManager;

import java.util.ArrayList;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.when;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { TestR4ConfigWithElasticsearchClient.class })
public class BaseR4SearchLastN extends BaseJpaTest {

@Autowired
@Qualifier("myPatientDaoR4")
protected IFhirResourceDaoPatient<Patient> myPatientDao;

@Autowired
@Qualifier("myObservationDaoR4")
protected IFhirResourceDaoObservation<Observation> myObservationDao;

@Autowired
protected DaoConfig myDaoConfig;

@Autowired
protected FhirContext myFhirCtx;

@Autowired
protected PlatformTransactionManager myPlatformTransactionManager;

@Override
protected FhirContext getContext() {
return myFhirCtx;
}

@Override
protected PlatformTransactionManager getTxManager() {
return myPlatformTransactionManager;
}

protected final String observationCd0 = "code0";
protected final String observationCd1 = "code1";
protected final String observationCd2 = "code2";
private final String observationCd3 = "code3";

protected final String categoryCd0 = "category0";
private final String categoryCd1 = "category1";
private final String categoryCd2 = "category2";
private final String categoryCd3 = "category3";

protected final String codeSystem = "http://mycode.com";
private final String categorySystem = "http://mycategory.com";

// Using static variables, including the flag below, so that we can initialize the database and indexes once
// (all of the tests only read from the DB and indexes, so there is no need to re-initialize them for each test).
private static boolean dataLoaded = false;

protected static IIdType patient0Id = null;
protected static IIdType patient1Id = null;
protected static IIdType patient2Id = null;

private static final Map<String, String> observationPatientMap = new HashMap<>();
private static final Map<String, String> observationCategoryMap = new HashMap<>();
private static final Map<String, String> observationCodeMap = new HashMap<>();
private static final Map<String, Date> observationEffectiveMap = new HashMap<>();

@Before
public void beforeCreateTestPatientsAndObservations() {
// Using a static flag to ensure that the test data and the elasticsearch index are only created once.
// Creating this data and the index is time consuming, so we want to avoid repeating it for each test.
// Normally a static @BeforeClass method would be used for this purpose, but @Autowired objects cannot be accessed in static methods.
if(!dataLoaded) {
Patient pt = new Patient();
pt.addName().setFamily("Lastn").addGiven("Arthur");
patient0Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
createObservationsForPatient(patient0Id);
pt = new Patient();
pt.addName().setFamily("Lastn").addGiven("Johnathan");
patient1Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
createObservationsForPatient(patient1Id);
pt = new Patient();
pt.addName().setFamily("Lastn").addGiven("Michael");
patient2Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
createObservationsForPatient(patient2Id);
dataLoaded = true;

}

}

private void createObservationsForPatient(IIdType thePatientId) {
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd0, categoryCd0, 15);
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd0, categoryCd1, 10);
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd0, categoryCd2, 5);
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd1, categoryCd0, 10);
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd1, categoryCd1, 5);
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd2, categoryCd2, 5);
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd3, categoryCd3, 5);
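// Each patient therefore gets 7 code/category combinations x 5 Observations = 35 Observations,
// or 105 across the three test patients, which is where the expected counts in the tests below come from.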
}

private void createFiveObservationsForPatientCodeCategory(IIdType thePatientId, String theObservationCode, String theCategoryCode,
Integer theTimeOffset) {
Calendar observationDate = new GregorianCalendar();

for (int idx=0; idx<5; idx++ ) {
Observation obs = new Observation();
obs.getSubject().setReferenceElement(thePatientId);
obs.getCode().addCoding().setCode(theObservationCode).setSystem(codeSystem);
obs.setValue(new StringType(theObservationCode + "_0"));
observationDate.add(Calendar.HOUR, -theTimeOffset+idx);
Date effectiveDtm = observationDate.getTime();
obs.setEffective(new DateTimeType(effectiveDtm));
obs.getCategoryFirstRep().addCoding().setCode(theCategoryCode).setSystem(categorySystem);
String observationId = myObservationDao.create(obs, mockSrd()).getId().toUnqualifiedVersionless().getValue();
observationPatientMap.put(observationId, thePatientId.getValue());
observationCategoryMap.put(observationId, theCategoryCode);
observationCodeMap.put(observationId, theObservationCode);
observationEffectiveMap.put(observationId, effectiveDtm);
}
}

protected ServletRequestDetails mockSrd() {
return mySrd;
}

@Test
public void testLastNAllPatients() {

SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);
sortedObservationCodes.add(observationCd2);
sortedObservationCodes.add(observationCd3);

executeTestCase(params, sortedPatients, sortedObservationCodes, null,105);
}

@Test
public void testLastNNoPatients() {

SearchParameterMap params = new SearchParameterMap();
params.setLastNMax(1);

params.setLastN(true);
Map<String, String[]> requestParameters = new HashMap<>();
when(mySrd.getParameters()).thenReturn(requestParameters);

List<String> actual = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(),null));
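// With lastNMax=1 and no subject criteria, the expectation (per the assertion below) is one
// most-recent Observation for each of the 4 distinct codes created during setup.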

assertEquals(4, actual.size());
}

private void executeTestCase(SearchParameterMap params, List<String> sortedPatients, List<String> sortedObservationCodes, List<String> theCategories, int expectedObservationCount) {
List<String> actual;
params.setLastN(true);

Map<String, String[]> requestParameters = new HashMap<>();
params.setLastNMax(100);

when(mySrd.getParameters()).thenReturn(requestParameters);

actual = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(),null));

assertEquals(expectedObservationCount, actual.size());

validateSorting(actual, sortedPatients, sortedObservationCodes, theCategories);
}
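// The helpers below verify the ordering implied by the maps captured during setup: results grouped
// by patient, then by Observation code, with each code group ordered from the most recent
// effective date to the oldest.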

private void validateSorting(List<String> theObservationIds, List<String> thePatientIds, List<String> theCodes, List<String> theCategores) {
int theNextObservationIdx = 0;
// Validate patient grouping
for (String patientId : thePatientIds) {
assertEquals(patientId, observationPatientMap.get(theObservationIds.get(theNextObservationIdx)));
theNextObservationIdx = validateSortingWithinPatient(theObservationIds,theNextObservationIdx,theCodes, theCategores, patientId);
}
assertEquals(theObservationIds.size(), theNextObservationIdx);
}

private int validateSortingWithinPatient(List<String> theObservationIds, int theFirstObservationIdxForPatient, List<String> theCodes,
List<String> theCategories, String thePatientId) {
int theNextObservationIdx = theFirstObservationIdxForPatient;
for (String codeValue : theCodes) {
assertEquals(codeValue, observationCodeMap.get(theObservationIds.get(theNextObservationIdx)));
// Validate sorting within code group
theNextObservationIdx = validateSortingWithinCode(theObservationIds,theNextObservationIdx,
observationCodeMap.get(theObservationIds.get(theNextObservationIdx)), theCategories, thePatientId);
}
return theNextObservationIdx;
}

private int validateSortingWithinCode(List<String> theObservationIds, int theFirstObservationIdxForPatientAndCode, String theObservationCode,
List<String> theCategories, String thePatientId) {
int theNextObservationIdx = theFirstObservationIdxForPatientAndCode;
Date lastEffectiveDt = observationEffectiveMap.get(theObservationIds.get(theNextObservationIdx));
theNextObservationIdx++;
while(theObservationCode.equals(observationCodeMap.get(theObservationIds.get(theNextObservationIdx)))
&& thePatientId.equals(observationPatientMap.get(theObservationIds.get(theNextObservationIdx)))) {
// Check that effective date is before that of the previous observation.
assertTrue(lastEffectiveDt.compareTo(observationEffectiveMap.get(theObservationIds.get(theNextObservationIdx))) > 0);
lastEffectiveDt = observationEffectiveMap.get(theObservationIds.get(theNextObservationIdx));

// Check that observation is in one of the specified categories (if applicable)
if (theCategories != null && !theCategories.isEmpty()) {
assertTrue(theCategories.contains(observationCategoryMap.get(theObservationIds.get(theNextObservationIdx))));
}
theNextObservationIdx++;
if (theNextObservationIdx >= theObservationIds.size()) {
// Have reached the end of the Observation list.
break;
}
}
return theNextObservationIdx;
}

@Test
public void testLastNSinglePatient() {

SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", patient0Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);
sortedObservationCodes.add(observationCd2);
sortedObservationCodes.add(observationCd3);

executeTestCase(params, sortedPatients,sortedObservationCodes, null,35);

params = new SearchParameterMap();
ReferenceParam patientParam = new ReferenceParam("Patient", "", patient0Id.getValue());
params.add(Observation.SP_PATIENT, buildReferenceAndListParam(patientParam));

sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());

sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);
sortedObservationCodes.add(observationCd2);
sortedObservationCodes.add(observationCd3);

executeTestCase(params, sortedPatients,sortedObservationCodes, null,35);
}

protected ReferenceAndListParam buildReferenceAndListParam(ReferenceParam... theReference) {
ReferenceOrListParam myReferenceOrListParam = new ReferenceOrListParam();
for (ReferenceParam referenceParam : theReference) {
myReferenceOrListParam.addOr(referenceParam);
}
return new ReferenceAndListParam().addAnd(myReferenceOrListParam);
}

@Test
public void testLastNMultiplePatients() {

// Two Subject parameters.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2));

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);
sortedObservationCodes.add(observationCd2);
sortedObservationCodes.add(observationCd3);

executeTestCase(params, sortedPatients, sortedObservationCodes, null,70);

// Two Patient parameters
params = new SearchParameterMap();
ReferenceParam patientParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam patientParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(patientParam1, patientParam3));

sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient2Id.getValue());

executeTestCase(params,sortedPatients, sortedObservationCodes, null,70);

}

@Test
public void testLastNSingleCategory() {

// One category parameter.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));

TokenParam categoryParam = new TokenParam(categorySystem, categoryCd0);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd0);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 30);

// Another category parameter.
params = new SearchParameterMap();
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));
categoryParam = new TokenParam(categorySystem, categoryCd2);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
myCategories = new ArrayList<>();
myCategories.add(categoryCd2);

sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 30);

}

@Test
public void testLastNMultipleCategories() {

// Two category parameters.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));

TokenParam categoryParam1 = new TokenParam(categorySystem, categoryCd0);
TokenParam categoryParam2 = new TokenParam(categorySystem, categoryCd1);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd0);
myCategories.add(categoryCd1);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 60);
}

@Test
public void testLastNSingleCode() {

// One code parameter.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));

TokenParam code = new TokenParam(codeSystem, observationCd0);
params.add(Observation.SP_CODE, buildTokenAndListParam(code));
List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

executeTestCase(params, sortedPatients, sortedObservationCodes, null, 45);

// Another code parameter.
params = new SearchParameterMap();
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));
code = new TokenParam(codeSystem, observationCd2);
params.add(Observation.SP_CODE, buildTokenAndListParam(code));
sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, null, 15);

}

@Test
public void testLastNMultipleCodes() {

// Two code parameters.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));

TokenParam codeParam1 = new TokenParam(codeSystem, observationCd0);
TokenParam codeParam2 = new TokenParam(codeSystem, observationCd1);
params.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));
List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

executeTestCase(params, sortedPatients, sortedObservationCodes, null, 75);

}

@Test
public void testLastNSinglePatientCategoryCode() {

// One patient, category and code.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", patient0Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
TokenParam code = new TokenParam(codeSystem, observationCd0);
params.add(Observation.SP_CODE, buildTokenAndListParam(code));
TokenParam category = new TokenParam(categorySystem, categoryCd2);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(category));

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);

List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 5);

}

@Test
public void testLastNMultiplePatientsCategoriesCodes() {

// Two patients, categories and codes.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2));
List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());

TokenParam codeParam1 = new TokenParam(codeSystem, observationCd0);
TokenParam codeParam2 = new TokenParam(codeSystem, observationCd2);
params.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));
List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd2);

TokenParam categoryParam1 = new TokenParam(categorySystem, categoryCd1);
TokenParam categoryParam2 = new TokenParam(categorySystem, categoryCd2);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd1);
myCategories.add(categoryCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 30);

}

protected TokenAndListParam buildTokenAndListParam(TokenParam... theToken) {
TokenOrListParam myTokenOrListParam = new TokenOrListParam();
for (TokenParam tokenParam : theToken) {
myTokenOrListParam.addOr(tokenParam);
}
return new TokenAndListParam().addAnd(myTokenOrListParam);
}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}

}
@@ -0,0 +1,133 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.SearchBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.server.RestfulServer;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.Observation;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.mockito.Mockito.when;

@RunWith(SpringJUnit4ClassRunner.class)
public class FhirResourceDaoR4SearchLastNAsyncIT extends BaseR4SearchLastN {

@Autowired
protected DaoConfig myDaoConfig;

private List<Integer> originalPreFetchThresholds;

@Before
public void before() {

RestfulServer myServer = new RestfulServer(myFhirCtx);
myServer.setPagingProvider(myDatabaseBackedPagingProvider);

when(mySrd.getServer()).thenReturn(myServer);

// Set pre-fetch sizes small so that most tests are forced to do multiple fetches.
// This allows testing a common use case where the result set is larger than the first fetch size but smaller than the normal query chunk size.
originalPreFetchThresholds = myDaoConfig.getSearchPreFetchThresholds();
List<Integer> mySmallerPreFetchThresholds = new ArrayList<>();
mySmallerPreFetchThresholds.add(20);
mySmallerPreFetchThresholds.add(400);
mySmallerPreFetchThresholds.add(-1);
myDaoConfig.setSearchPreFetchThresholds(mySmallerPreFetchThresholds);

SearchBuilder.setMaxPageSize50ForTest(true);

}

@After
public void after() {
myDaoConfig.setSearchPreFetchThresholds(originalPreFetchThresholds);
SearchBuilder.setMaxPageSize50ForTest(false);
}

@Test
public void testLastNChunking() {

// Set up search parameters that will return 75 Observations.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));
TokenParam codeParam1 = new TokenParam(codeSystem, observationCd0);
TokenParam codeParam2 = new TokenParam(codeSystem, observationCd1);
params.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));

params.setLastN(true);
params.setLastNMax(100);

Map<String, String[]> requestParameters = new HashMap<>();
when(mySrd.getParameters()).thenReturn(requestParameters);

// Set chunk size to 50
SearchBuilder.setMaxPageSize50ForTest(true);

// Expand default fetch sizes to ensure all observations are returned in first page:
List<Integer> myBiggerPreFetchThresholds = new ArrayList<>();
myBiggerPreFetchThresholds.add(100);
myBiggerPreFetchThresholds.add(1000);
myBiggerPreFetchThresholds.add(-1);
myDaoConfig.setSearchPreFetchThresholds(myBiggerPreFetchThresholds);

myCaptureQueriesListener.clear();
List<String> results = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(),null));
assertEquals(75, results.size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
List<String> queries = myCaptureQueriesListener
.getSelectQueriesForCurrentThread()
.stream()
.map(t -> t.getSql(true, false))
.collect(Collectors.toList());

// 1 query to look up the Search from the cache, and 2 chunked queries to retrieve resources by PID.
assertEquals(3, queries.size());

// The first chunked query should have a full complement of PIDs
StringBuilder firstQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");
for (int pidIndex = 1; pidIndex<50; pidIndex++) {
firstQueryPattern.append(" , '[0-9]+'");
}
firstQueryPattern.append("\\).*");
assertThat(queries.get(1), matchesPattern(firstQueryPattern.toString()));

// The second chunked query should be padded with "-1".
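// (Note: the final chunk's IN clause appears to be padded with '-1' placeholders up to the 50-PID
// chunk size, presumably so the SQL statement shape stays constant; the pattern below simply
// asserts that observed behaviour.)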
StringBuilder secondQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");
for (int pidIndex = 1; pidIndex<25; pidIndex++) {
secondQueryPattern.append(" , '[0-9]+'");
}
for (int pidIndex = 0; pidIndex<25; pidIndex++) {
secondQueryPattern.append(" , '-1'");
}
secondQueryPattern.append("\\).*");
assertThat(queries.get(2), matchesPattern(secondQueryPattern.toString()));

}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}

}
@@ -0,0 +1,92 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.dao.SearchBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.r4.model.*;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.util.*;
import java.util.stream.Collectors;

import static org.hamcrest.Matchers.matchesPattern;
import static org.junit.Assert.*;
import static org.mockito.Mockito.when;

@RunWith(SpringJUnit4ClassRunner.class)
public class FhirResourceDaoR4SearchLastNIT extends BaseR4SearchLastN {

@After
public void resetMaximumPageSize() {
SearchBuilder.setMaxPageSize50ForTest(false);
}

@Test
public void testLastNChunking() {

// Set up search parameters that will return 75 Observations.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
ReferenceParam subjectParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2, subjectParam3));
TokenParam codeParam1 = new TokenParam(codeSystem, observationCd0);
TokenParam codeParam2 = new TokenParam(codeSystem, observationCd1);
params.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));

params.setLastN(true);
params.setLastNMax(100);

Map<String, String[]> requestParameters = new HashMap<>();
when(mySrd.getParameters()).thenReturn(requestParameters);

// Set chunk size to 50
SearchBuilder.setMaxPageSize50ForTest(true);

myCaptureQueriesListener.clear();
List<String> results = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(),null));
assertEquals(75, results.size());
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
List<String> queries = myCaptureQueriesListener
.getSelectQueriesForCurrentThread()
.stream()
.map(t -> t.getSql(true, false))
.collect(Collectors.toList());

// Two chunked queries executed by the QueryIterator (in current thread) and two chunked queries to retrieve resources by PID.
assertEquals(4, queries.size());

// The first and third chunked queries should have a full complement of PIDs
StringBuilder firstQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");
for (int pidIndex = 1; pidIndex<50; pidIndex++) {
firstQueryPattern.append(" , '[0-9]+'");
}
firstQueryPattern.append("\\).*");
assertThat(queries.get(0), matchesPattern(firstQueryPattern.toString()));
assertThat(queries.get(2), matchesPattern(firstQueryPattern.toString()));

// the second and fourth chunked queries should be padded with "-1".
StringBuilder secondQueryPattern = new StringBuilder(".*RES_ID in \\('[0-9]+'");
for (int pidIndex = 1; pidIndex<25; pidIndex++) {
secondQueryPattern.append(" , '[0-9]+'");
}
for (int pidIndex = 0; pidIndex<25; pidIndex++) {
secondQueryPattern.append(" , '-1'");
}
secondQueryPattern.append("\\).*");
assertThat(queries.get(1), matchesPattern(secondQueryPattern.toString()));
assertThat(queries.get(3), matchesPattern(secondQueryPattern.toString()));

}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}

}
@@ -530,7 +530,7 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {
myCaptureQueriesListener.clear();
map = new SearchParameterMap();
map.setLoadSynchronous(true);
map.add(DiagnosticReport.SP_PERFORMER, new ReferenceParam( "CareTeam").setChain(PARAM_TYPE));
map.add(DiagnosticReport.SP_PERFORMER, new ReferenceParam("CareTeam").setChain(PARAM_TYPE));
results = myDiagnosticReportDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
assertThat(ids.toString(), ids, contains(drId1.getValue()));
@@ -1690,6 +1690,338 @@ public class FhirResourceDaoR4SearchNoFtTest extends BaseJpaR4Test {

}

@Test
public void testDateRangeOnPeriod_SearchByDateTime_NoUpperBound() {
Encounter enc = new Encounter();
enc.getPeriod().getStartElement().setValueAsString("2020-05-26T12:00:00Z");
String id1 = myEncounterDao.create(enc).getId().toUnqualifiedVersionless().getValue();

runInTransaction(() -> {
ourLog.info("Date indexes:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
});
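// Note: because this Period has a start but no end, the expectation (per the assertions below) is
// that any 'ge' bound matches, while an 'le' bound matches only when it falls on or after the start.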

// ge -> above the lower bound
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-26T13:00:00Z"));
myCaptureQueriesListener.clear();
IBundleProvider results = myEncounterDao.search(map);
List<String> ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// ge -> Below the lower bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-26T11:00:00Z"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// le -> above the lower bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("le2020-05-26T13:00:00Z"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// le -> Below the lower bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("le2020-05-26T11:00:00Z"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, empty());
}

@Test
public void testDateRangeOnPeriod_SearchByDate_NoUpperBound() {
Encounter enc = new Encounter();
enc.getPeriod().getStartElement().setValueAsString("2020-05-26T12:00:00Z");
String id1 = myEncounterDao.create(enc).getId().toUnqualifiedVersionless().getValue();

runInTransaction(() -> {
ourLog.info("Date indexes:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
});

// ge -> above the lower bound
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-27"));
myCaptureQueriesListener.clear();
IBundleProvider results = myEncounterDao.search(map);
List<String> ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// ge -> Below the lower bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-25"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// le -> above the lower bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("le2020-05-27"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// le -> Below the lower bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("le2020-05-25"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, empty());
}

@Test
public void testDateRangeOnPeriod_SearchByDateTime_NoLowerBound() {
Encounter enc = new Encounter();
enc.getPeriod().getEndElement().setValueAsString("2020-05-26T12:00:00Z");
String id1 = myEncounterDao.create(enc).getId().toUnqualifiedVersionless().getValue();

runInTransaction(() -> {
ourLog.info("Date indexes:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
});

// le -> above the upper bound
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("le2020-05-26T13:00:00Z"));
myCaptureQueriesListener.clear();
IBundleProvider results = myEncounterDao.search(map);
List<String> ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// le -> Below the upper bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("le2020-05-26T11:00:00Z"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));

// ge -> above the upper bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-26T13:00:00Z"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, empty());

// ge -> Below the upper bound
map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-26T11:00:00Z"));
myCaptureQueriesListener.clear();
results = myEncounterDao.search(map);
ids = toUnqualifiedVersionlessIdValues(results);
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
assertThat(ids, contains(id1));
}

@Test
public void testDateRangeOnPeriod_SearchByDate_NoLowerBound() {
Encounter enc = new Encounter();
enc.getPeriod().getEndElement().setValueAsString("2020-05-26T12:00:00Z");
String id1 = myEncounterDao.create(enc).getId().toUnqualifiedVersionless().getValue();

runInTransaction(() -> {
ourLog.info("Date indexes:\n * {}", myResourceIndexedSearchParamDateDao.findAll().stream().map(t -> t.toString()).collect(Collectors.joining("\n * ")));
});

// le -> above the upper bound
SearchParameterMap map = SearchParameterMap.newSynchronous();
map.add(Encounter.SP_DATE, new DateParam("le2020-05-27"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
IBundleProvider results = myEncounterDao.search(map);
|
||||||
|
List<String> ids = toUnqualifiedVersionlessIdValues(results);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
assertThat(ids, contains(id1));
|
||||||
|
|
||||||
|
// le -> Below the upper bound
|
||||||
|
map = SearchParameterMap.newSynchronous();
|
||||||
|
map.add(Encounter.SP_DATE, new DateParam("le2020-05-25"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
results = myEncounterDao.search(map);
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(results);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
assertThat(ids, contains(id1));
|
||||||
|
|
||||||
|
// ge -> above the upper bound
|
||||||
|
map = SearchParameterMap.newSynchronous();
|
||||||
|
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-27"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
results = myEncounterDao.search(map);
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(results);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
assertThat(ids, empty());
|
||||||
|
|
||||||
|
// ge -> Below the upper bound
|
||||||
|
map = SearchParameterMap.newSynchronous();
|
||||||
|
map.add(Encounter.SP_DATE, new DateParam("ge2020-05-25"));
|
||||||
|
myCaptureQueriesListener.clear();
|
||||||
|
results = myEncounterDao.search(map);
|
||||||
|
ids = toUnqualifiedVersionlessIdValues(results);
|
||||||
|
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||||
|
assertThat(ids, contains(id1));
|
||||||
|
}

   @Test
   public void testDatePeriodParamEndOnly() {
      {
         Encounter enc = new Encounter();
         enc.addIdentifier().setSystem("testDatePeriodParam").setValue("02");
         enc.getPeriod().getEndElement().setValueAsString("2001-01-02");
         myEncounterDao.create(enc, mySrd);
      }
      SearchParameterMap params;
      List<Encounter> encs;

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(0, encs.size());

   }
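
   // Editor's sketch (added for illustration, not part of this change set): a DateRangeParam with
   // one null bound is sent as a single prefixed value, e.g. GET [base]/Encounter?date=le2001-01-03,
   // while two bounds become date=ge...&date=le... on the request URL.
   private static void exampleDateRangeParamBounds() {
      DateRangeParam upperOnly = new DateRangeParam(null, "2001-01-03");    // date=le2001-01-03
      DateRangeParam lowerOnly = new DateRangeParam("2001-01-01", null);    // date=ge2001-01-01
      DateRangeParam both = new DateRangeParam("2001-01-01", "2001-01-03"); // date=ge...&date=le...
      System.out.println(upperOnly.getUpperBound().getValueAsString());
      System.out.println(lowerOnly.getLowerBound().getValueAsString());
      System.out.println(both.getLowerBound().getPrefix() + " / " + both.getUpperBound().getPrefix());
   }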

   @Test
   public void testDatePeriodParamStartAndEnd() {
      {
         Encounter enc = new Encounter();
         enc.addIdentifier().setSystem("testDatePeriodParam").setValue("03");
         enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
         enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
         myEncounterDao.create(enc, mySrd);
      }

      SearchParameterMap params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
      List<Encounter> encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-02", "2001-01-06"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-05"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(0, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-05", null));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(0, encs.size());

   }
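
   // Editor's note (added for illustration, not part of this change set): a date search against
   // Encounter.period matches when the parameter range and the stored period overlap, which is why
   // a range ending 2001-01-01 misses the 2001-01-02/2001-01-03 period above while one ending
   // 2001-01-03 matches it. The helper below just rebuilds that test fixture.
   private static Encounter exampleEncounterWithPeriod() {
      Encounter enc = new Encounter();
      enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
      enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
      return enc;
   }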

   @Test
   public void testDatePeriodParamStartOnly() {
      {
         Encounter enc = new Encounter();
         enc.addIdentifier().setSystem("testDatePeriodParam").setValue("01");
         enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
         myEncounterDao.create(enc, mySrd);
      }

      SearchParameterMap params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
      List<Encounter> encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(0, encs.size());

      params = new SearchParameterMap();
      params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
      params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
      encs = toList(myEncounterDao.search(params));
      assertEquals(1, encs.size());

   }
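
   // Editor's sketch (added for illustration, not part of this change set; imports elided, base URL
   // assumed): the date + identifier combinations above are an AND of two search parameters on the
   // wire, e.g. GET [base]/Encounter?date=ge2001-01-01&identifier=testDatePeriodParam|01.
   private static void exampleCombinedDateAndIdentifierSearch(IGenericClient client) {
      Bundle hits = client.search()
         .forResource(Encounter.class)
         .where(Encounter.DATE.afterOrEquals().day("2001-01-01"))
         .and(Encounter.IDENTIFIER.exactly().systemAndIdentifier("testDatePeriodParam", "01"))
         .returnBundle(Bundle.class)
         .execute();
      System.out.println("Matched " + hits.getEntry().size() + " encounters");
   }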

    /**
     * See #1174
     */
@@ -311,7 +311,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
     * 20 should be prefetched since that's the initial page size
     */

-      await().until(()-> runInTransaction(()->{
+      await().until(() -> runInTransaction(() -> {
          Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
          return search.getNumFound() >= 200;
       }));
@@ -371,8 +371,8 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
     * 20 should be prefetched since that's the initial page size
     */

-      await().until(()->{
-         return runInTransaction(()->{
+      await().until(() -> {
+         return runInTransaction(() -> {
             return mySearchEntityDao
                .findByUuidAndFetchIncludes(uuid)
                .orElseThrow(() -> new InternalErrorException(""))
@@ -507,8 +507,8 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
     * 20 should be prefetched since that's the initial page size
     */

-      await().until(()->{
-         return runInTransaction(()->{
+      await().until(() -> {
+         return runInTransaction(() -> {
             Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
             return search.getNumFound() >= 50;
          });
@@ -547,8 +547,8 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
    /*
     * 20 should be prefetched since that's the initial page size
     */
-      await().until(()->{
-         return runInTransaction(()->{
+      await().until(() -> {
+         return runInTransaction(() -> {
             Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
             return search.getNumFound() == 20;
          });
@@ -611,7 +611,12 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
     * 20 should be prefetched since that's the initial page size
     */

-      waitForSize(20, () -> runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).getNumFound()));
+      waitForSize(
+         20,
+         10000,
+         () -> runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).getNumFound()),
+         () -> "Wanted 20: " + runInTransaction(() -> mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException("")).toString()));

       runInTransaction(() -> {
          Search search = mySearchEntityDao.findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""));
          assertEquals(20, search.getNumFound());
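
Editor's note (added for illustration, not part of this diff): the hunks above tighten the polling that waits for the Search Coordinator to prefetch results. In isolation, the Awaitility pattern they rely on looks roughly like this (await is statically imported from org.awaitility.Awaitility, which these tests already use; the variable names are made up for the sketch):

   AtomicInteger numFound = new AtomicInteger();        // stand-in for search.getNumFound()
   new Thread(() -> numFound.set(20)).start();          // background work being waited on
   await().atMost(10, TimeUnit.SECONDS)                 // give up (and fail) after the timeout
      .until(() -> numFound.get() >= 20);               // poll until the condition holds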
@@ -673,7 +678,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
       assertEquals("Patient/PT00000", ids.get(0));
       assertEquals(1, ids.size());

-      await().until(()-> runInTransaction(()-> mySearchEntityDao
+      await().until(() -> runInTransaction(() -> mySearchEntityDao
          .findByUuidAndFetchIncludes(uuid).orElseThrow(() -> new InternalErrorException(""))
          .getStatus() == SearchStatusEnum.FINISHED));

@@ -821,7 +826,7 @@ public class FhirResourceDaoR4SearchOptimizedTest extends BaseJpaR4Test {
       search.getResources(0, 20);
       ourLog.info("** Done retrieving resources");

-      await().until(()->myCaptureQueriesListener.countSelectQueries() == 4);
+      await().until(() -> myCaptureQueriesListener.countSelectQueries() == 4);

       myCaptureQueriesListener.logSelectQueriesForCurrentThread();
       assertEquals(4, myCaptureQueriesListener.countSelectQueries());
@@ -60,10 +60,10 @@ import static org.junit.Assert.assertThat;
 @RunWith(SpringJUnit4ClassRunner.class)
 @ContextConfiguration(classes = {TestR4ConfigWithElasticSearch.class})
 @DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
-public class FhirResourceDaoR4SearchWithElasticSearchTest extends BaseJpaTest {
+public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
    public static final String URL_MY_CODE_SYSTEM = "http://example.com/my_code_system";
    public static final String URL_MY_VALUE_SET = "http://example.com/my_value_set";
-   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4SearchWithElasticSearchTest.class);
+   private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4SearchWithElasticSearchIT.class);
    @Autowired
    protected DaoConfig myDaoConfig;
    @Autowired
@ -1118,143 +1118,6 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testDatePeriodParamEndOnly() {
|
|
||||||
{
|
|
||||||
Encounter enc = new Encounter();
|
|
||||||
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("02");
|
|
||||||
enc.getPeriod().getEndElement().setValueAsString("2001-01-02");
|
|
||||||
myEncounterDao.create(enc, mySrd);
|
|
||||||
}
|
|
||||||
SearchParameterMap params;
|
|
||||||
List<Encounter> encs;
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
|
|
||||||
// encs = toList(ourEncounterDao.search(params));
|
|
||||||
// assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "02"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testDatePeriodParamStartAndEnd() {
|
|
||||||
{
|
|
||||||
Encounter enc = new Encounter();
|
|
||||||
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("03");
|
|
||||||
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
|
|
||||||
enc.getPeriod().getEndElement().setValueAsString("2001-01-03");
|
|
||||||
myEncounterDao.create(enc, mySrd);
|
|
||||||
}
|
|
||||||
|
|
||||||
SearchParameterMap params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
|
|
||||||
List<Encounter> encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-02", "2001-01-06"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-05"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-05", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "03"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void testDatePeriodParamStartOnly() {
|
|
||||||
{
|
|
||||||
Encounter enc = new Encounter();
|
|
||||||
enc.addIdentifier().setSystem("testDatePeriodParam").setValue("01");
|
|
||||||
enc.getPeriod().getStartElement().setValueAsString("2001-01-02");
|
|
||||||
myEncounterDao.create(enc, mySrd);
|
|
||||||
}
|
|
||||||
|
|
||||||
SearchParameterMap params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
|
|
||||||
List<Encounter> encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-01", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-03"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(1, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam(null, "2001-01-01"));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
params = new SearchParameterMap();
|
|
||||||
params.add(Encounter.SP_DATE, new DateRangeParam("2001-01-03", null));
|
|
||||||
params.add(Encounter.SP_IDENTIFIER, new TokenParam("testDatePeriodParam", "01"));
|
|
||||||
encs = toList(myEncounterDao.search(params));
|
|
||||||
assertEquals(0, encs.size());
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
@Test
|
||||||
public void testDeleteFailsIfIncomingLinks() {
|
public void testDeleteFailsIfIncomingLinks() {
|
||||||
|
|
|
@@ -68,6 +68,7 @@ import java.util.ArrayList;
 import java.util.Date;
 import java.util.List;
 import java.util.function.Consumer;
+import java.util.stream.Collector;
 import java.util.stream.Collectors;

 import static ca.uhn.fhir.jpa.util.TestUtil.sleepAtLeast;
@@ -701,6 +702,60 @@ public class PartitioningR4Test extends BaseJpaR4SystemTest {

    }

+   @Test
+   public void testUpdateConditionalInPartition() {
+      myDaoConfig.setIndexMissingFields(DaoConfig.IndexEnabledEnum.DISABLED);
+      createRequestId();
+
+      // Create a resource
+      addCreatePartition(myPartitionId, myPartitionDate);
+      addReadPartition(myPartitionId);
+      Patient p = new Patient();
+      p.setActive(false);
+      p.addIdentifier().setValue("12345");
+      Long patientId = myPatientDao.update(p, "Patient?identifier=12345", mySrd).getId().getIdPartAsLong();
+      runInTransaction(() -> {
+         // HFJ_RESOURCE
+         assertEquals(1, myResourceTableDao.count());
+         ResourceTable resourceTable = myResourceTableDao.findById(patientId).orElseThrow(IllegalArgumentException::new);
+         assertEquals(myPartitionId, resourceTable.getPartitionId().getPartitionId().intValue());
+         assertEquals(myPartitionDate, resourceTable.getPartitionId().getPartitionDate());
+
+         // HFJ_SPIDX_TOKEN
+         ourLog.info("Tokens:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * ")));
+         assertEquals(3, myResourceIndexedSearchParamTokenDao.countForResourceId(patientId));
+      });
+
+      // Update that resource
+      addReadPartition(myPartitionId);
+      p = new Patient();
+      p.setActive(true);
+      p.addIdentifier().setValue("12345");
+      Long patientId2 = myPatientDao.update(p, "Patient?identifier=12345", mySrd).getId().getIdPartAsLong();
+
+      assertEquals(patientId, patientId2);
+
+      runInTransaction(() -> {
+         // HFJ_RESOURCE
+         assertEquals(1, myResourceTableDao.count());
+         ResourceTable resourceTable = myResourceTableDao.findById(patientId).orElseThrow(IllegalArgumentException::new);
+         assertEquals(myPartitionId, resourceTable.getPartitionId().getPartitionId().intValue());
+         assertEquals(myPartitionDate, resourceTable.getPartitionId().getPartitionDate());
+
+         // HFJ_SPIDX_TOKEN
+         ourLog.info("Tokens:\n * {}", myResourceIndexedSearchParamTokenDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * ")));
+         assertEquals(3, myResourceIndexedSearchParamTokenDao.countForResourceId(patientId));
+
+         // HFJ_RES_VER
+         int version = 2;
+         ResourceHistoryTable resVer = myResourceHistoryTableDao.findForIdAndVersionAndFetchProvenance(patientId, version);
+         assertEquals(myPartitionId, resVer.getPartitionId().getPartitionId().intValue());
+         assertEquals(myPartitionDate, resVer.getPartitionId().getPartitionDate());
+
+      });
+
+   }
+
    @Test
    public void testRead_PidId_AllPartitions() {
       IIdType patientId1 = createPatient(withPartition(1), withActiveTrue());
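
Editor's note (added for illustration, not part of this diff): the new test drives a conditional update ("Patient?identifier=12345") at the DAO layer. Through the generic client the same operation looks roughly like this (base URL assumed, imports elided):

   Patient p = new Patient();
   p.setActive(true);
   p.addIdentifier().setValue("12345");
   IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8080/fhir");
   MethodOutcome outcome = client.update()
      .resource(p)
      .conditionalByUrl("Patient?identifier=12345") // update-by-search rather than by id
      .execute();
   System.out.println("Resulting id: " + outcome.getId().getValue());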
|
||||||
|
|
|
@ -0,0 +1,417 @@
|
||||||
|
package ca.uhn.fhir.jpa.dao.r4;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||||
|
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticsearchClient;
|
||||||
|
import ca.uhn.fhir.jpa.dao.data.IObservationIndexedCodeCodeableConceptSearchParamDao;
|
||||||
|
import ca.uhn.fhir.jpa.dao.ObservationLastNIndexPersistSvc;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity;
|
||||||
|
import ca.uhn.fhir.jpa.dao.data.IObservationIndexedSearchParamLastNDao;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||||
|
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.parser.IParser;
|
||||||
|
import ca.uhn.fhir.rest.param.*;
|
||||||
|
import com.google.common.base.Charsets;
|
||||||
|
import org.apache.commons.io.IOUtils;
|
||||||
|
import org.hl7.fhir.r4.model.*;
|
||||||
|
import org.junit.Before;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.runner.RunWith;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.core.io.Resource;
|
||||||
|
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
|
||||||
|
import org.springframework.test.context.ContextConfiguration;
|
||||||
|
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||||
|
|
||||||
|
import javax.persistence.EntityManager;
|
||||||
|
import javax.persistence.PersistenceContext;
|
||||||
|
import javax.persistence.PersistenceContextType;
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.io.InputStream;
|
||||||
|
import java.util.*;
|
||||||
|
import java.util.concurrent.atomic.AtomicInteger;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertNotNull;
|
||||||
|
import static org.junit.Assert.assertNull;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
|
@RunWith(SpringJUnit4ClassRunner.class)
|
||||||
|
@ContextConfiguration(classes = {TestR4ConfigWithElasticsearchClient.class})
|
||||||
|
public class PersistObservationIndexedSearchParamLastNR4IT {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
IObservationIndexedSearchParamLastNDao myResourceIndexedObservationLastNDao;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
IObservationIndexedCodeCodeableConceptSearchParamDao myCodeableConceptIndexedSearchParamNormalizedDao;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private ElasticsearchSvcImpl elasticsearchSvc;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
private IFhirSystemDao<Bundle, Meta> myDao;
|
||||||
|
|
||||||
|
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
|
||||||
|
protected EntityManager myEntityManager;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
ObservationLastNIndexPersistSvc testObservationPersist;
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
protected FhirContext myFhirCtx;
|
||||||
|
|
||||||
|
@Before
|
||||||
|
public void before() {
|
||||||
|
|
||||||
|
myResourceIndexedObservationLastNDao.deleteAll();
|
||||||
|
myCodeableConceptIndexedSearchParamNormalizedDao.deleteAll();
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private final String SINGLE_SUBJECT_ID = "4567";
|
||||||
|
private final String SINGLE_OBSERVATION_PID = "123";
|
||||||
|
private final Date SINGLE_EFFECTIVEDTM = new Date();
|
||||||
|
private final String SINGLE_OBSERVATION_CODE_TEXT = "Test Codeable Concept Field for Code";
|
||||||
|
|
||||||
|
private final String CATEGORYFIRSTCODINGSYSTEM = "http://mycodes.org/fhir/observation-category";
|
||||||
|
private final String FIRSTCATEGORYFIRSTCODINGCODE = "test-heart-rate";
|
||||||
|
|
||||||
|
private final String CODEFIRSTCODINGSYSTEM = "http://mycodes.org/fhir/observation-code";
|
||||||
|
private final String CODEFIRSTCODINGCODE = "test-code";
|
||||||
|
|
||||||
|
private ReferenceAndListParam multiSubjectParams = null;
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testIndexObservationSingle() {
|
||||||
|
indexSingleObservation();
|
||||||
|
List<ObservationIndexedSearchParamLastNEntity> persistedObservationEntities = myResourceIndexedObservationLastNDao.findAll();
|
||||||
|
assertEquals(1, persistedObservationEntities.size());
|
||||||
|
ObservationIndexedSearchParamLastNEntity persistedObservationEntity = persistedObservationEntities.get(0);
|
||||||
|
assertEquals(SINGLE_SUBJECT_ID, persistedObservationEntity.getSubject());
|
||||||
|
assertEquals(SINGLE_OBSERVATION_PID, persistedObservationEntity.getIdentifier());
|
||||||
|
assertEquals(SINGLE_EFFECTIVEDTM, persistedObservationEntity.getEffectiveDtm());
|
||||||
|
|
||||||
|
String observationCodeNormalizedId = persistedObservationEntity.getCodeNormalizedId();
|
||||||
|
|
||||||
|
List<ObservationIndexedCodeCodeableConceptEntity> persistedObservationCodes = myCodeableConceptIndexedSearchParamNormalizedDao.findAll();
|
||||||
|
assertEquals(1, persistedObservationCodes.size());
|
||||||
|
ObservationIndexedCodeCodeableConceptEntity persistedObservationCode = persistedObservationCodes.get(0);
|
||||||
|
assertEquals(observationCodeNormalizedId, persistedObservationCode.getCodeableConceptId());
|
||||||
|
assertEquals(SINGLE_OBSERVATION_CODE_TEXT, persistedObservationCode.getCodeableConceptText());
|
||||||
|
|
||||||
|
SearchParameterMap searchParameterMap = new SearchParameterMap();
|
||||||
|
ReferenceParam subjectParam = new ReferenceParam("Patient", "", SINGLE_SUBJECT_ID);
|
||||||
|
searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
|
||||||
|
TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
|
||||||
|
searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
|
||||||
|
TokenParam codeParam = new TokenParam(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE);
|
||||||
|
searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));
|
||||||
|
searchParameterMap.setLastNMax(3);
|
||||||
|
|
||||||
|
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 100);
|
||||||
|
|
||||||
|
assertEquals(1, observationIdsOnly.size());
|
||||||
|
assertEquals(SINGLE_OBSERVATION_PID, observationIdsOnly.get(0));
|
||||||
|
}
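
Editor's note (added for illustration, not part of this change set): the index-and-query round trip above is what backs the Observation $lastn operation. A hedged sketch of calling it over REST with the generic client, using the same subject and category values as the test data (base URL assumed, imports elided):

   IGenericClient client = FhirContext.forR4().newRestfulGenericClient("http://localhost:8080/fhir");
   Bundle lastN = client.search()
      .byUrl("Observation/$lastn?max=3&subject=Patient/4567"
         + "&category=http://mycodes.org/fhir/observation-category|test-heart-rate")
      .returnBundle(Bundle.class)
      .execute();
   System.out.println(lastN.getEntry().size() + " most recent observations returned");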
|
||||||
|
|
||||||
|
private void indexSingleObservation() {
|
||||||
|
|
||||||
|
Observation myObservation = new Observation();
|
||||||
|
IdType observationID = new IdType("Observation", SINGLE_OBSERVATION_PID, "1");
|
||||||
|
myObservation.setId(observationID);
|
||||||
|
Reference subjectId = new Reference(SINGLE_SUBJECT_ID);
|
||||||
|
myObservation.setSubject(subjectId);
|
||||||
|
myObservation.setEffective(new DateTimeType(SINGLE_EFFECTIVEDTM));
|
||||||
|
|
||||||
|
myObservation.setCategory(getCategoryCode());
|
||||||
|
|
||||||
|
myObservation.setCode(getObservationCode());
|
||||||
|
|
||||||
|
testObservationPersist.indexObservation(myObservation);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private List<CodeableConcept> getCategoryCode() {
|
||||||
|
// Add three CodeableConcepts for category
|
||||||
|
List<CodeableConcept> categoryConcepts = new ArrayList<>();
|
||||||
|
// Create three codings and first category CodeableConcept
|
||||||
|
List<Coding> category = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept1 = new CodeableConcept().setText("Test Codeable Concept Field for first category");
|
||||||
|
category.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE, "test-heart-rate display"));
|
||||||
|
category.add(new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-heart-rate", "test-alt-heart-rate display"));
|
||||||
|
category.add(new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-heart-rate", "test-2nd-alt-heart-rate display"));
|
||||||
|
categoryCodeableConcept1.setCoding(category);
|
||||||
|
categoryConcepts.add(categoryCodeableConcept1);
|
||||||
|
// Create three codings and second category CodeableConcept
|
||||||
|
List<Coding> category2 = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept2 = new CodeableConcept().setText("Test Codeable Concept Field for for second category");
|
||||||
|
category2.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, "test-vital-signs", "test-vital-signs display"));
|
||||||
|
category2.add(new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals", "test-alt-vitals display"));
|
||||||
|
category2.add(new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-vitals", "test-2nd-alt-vitals display"));
|
||||||
|
categoryCodeableConcept2.setCoding(category2);
|
||||||
|
categoryConcepts.add(categoryCodeableConcept2);
|
||||||
|
// Create three codings and third category CodeableConcept
|
||||||
|
List<Coding> category3 = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept3 = new CodeableConcept().setText("Test Codeable Concept Field for third category");
|
||||||
|
category3.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, "test-vitals-panel", "test-vitals-panel display"));
|
||||||
|
category3.add(new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals-panel", "test-alt-vitals-panel display"));
|
||||||
|
category3.add(new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-vitals-panel", "test-2nd-alt-vitals-panel display"));
|
||||||
|
categoryCodeableConcept3.setCoding(category3);
|
||||||
|
categoryConcepts.add(categoryCodeableConcept3);
|
||||||
|
return categoryConcepts;
|
||||||
|
}
|
||||||
|
|
||||||
|
private CodeableConcept getObservationCode() {
|
||||||
|
// Create CodeableConcept for Code with three codings.
|
||||||
|
CodeableConcept codeableConceptField = new CodeableConcept().setText(SINGLE_OBSERVATION_CODE_TEXT);
|
||||||
|
codeableConceptField.addCoding(new Coding(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE, "test-code display"));
|
||||||
|
return codeableConceptField;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testIndexObservationMultiple() {
|
||||||
|
indexMultipleObservations();
|
||||||
|
assertEquals(100, myResourceIndexedObservationLastNDao.count());
|
||||||
|
assertEquals(2, myCodeableConceptIndexedSearchParamNormalizedDao.count());
|
||||||
|
|
||||||
|
// Check that all observations were indexed.
|
||||||
|
SearchParameterMap searchParameterMap = new SearchParameterMap();
|
||||||
|
searchParameterMap.add(Observation.SP_SUBJECT, multiSubjectParams);
|
||||||
|
|
||||||
|
searchParameterMap.setLastNMax(10);
|
||||||
|
|
||||||
|
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
|
||||||
|
assertEquals(100, observationIdsOnly.size());
|
||||||
|
|
||||||
|
// Filter the results by category code.
|
||||||
|
TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
|
||||||
|
searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
|
||||||
|
|
||||||
|
|
||||||
|
observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 100);
|
||||||
|
|
||||||
|
assertEquals(50, observationIdsOnly.size());
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private void indexMultipleObservations() {
|
||||||
|
|
||||||
|
// Create two CodeableConcept values each for a Code with three codings.
|
||||||
|
CodeableConcept codeableConceptField1 = new CodeableConcept().setText("Test Codeable Concept Field for First Code");
|
||||||
|
codeableConceptField1.addCoding(new Coding(CODEFIRSTCODINGSYSTEM, "test-code-1", "test-code-1 display"));
|
||||||
|
codeableConceptField1.addCoding(new Coding("http://myalternatecodes.org/fhir/observation-code", "test-alt-code-1", "test-alt-code-1 display"));
|
||||||
|
codeableConceptField1.addCoding(new Coding("http://mysecondaltcodes.org/fhir/observation-code", "test-second-alt-code-1", "test-second-alt-code-1 display"));
|
||||||
|
|
||||||
|
CodeableConcept codeableConceptField2 = new CodeableConcept().setText("Test Codeable Concept Field for Second Code");
|
||||||
|
codeableConceptField2.addCoding(new Coding(CODEFIRSTCODINGSYSTEM, "test-code-2", "test-code-2 display"));
|
||||||
|
codeableConceptField2.addCoding(new Coding("http://myalternatecodes.org/fhir/observation-code", "test-alt-code-2", "test-alt-code-2 display"));
|
||||||
|
codeableConceptField2.addCoding(new Coding("http://mysecondaltcodes.org/fhir/observation-code", "test-second-alt-code-2", "test-second-alt-code-2 display"));
|
||||||
|
|
||||||
|
// Create two CodeableConcept entities for category, each with three codings.
|
||||||
|
List<Coding> category1 = new ArrayList<>();
|
||||||
|
// Create three codings and first category CodeableConcept
|
||||||
|
category1.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE, "test-heart-rate display"));
|
||||||
|
category1.add(new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-heart-rate", "test-alt-heart-rate display"));
|
||||||
|
category1.add(new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-heart-rate", "test-2nd-alt-heart-rate display"));
|
||||||
|
List<CodeableConcept> categoryConcepts1 = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept1 = new CodeableConcept().setText("Test Codeable Concept Field for first category");
|
||||||
|
categoryCodeableConcept1.setCoding(category1);
|
||||||
|
categoryConcepts1.add(categoryCodeableConcept1);
|
||||||
|
// Create three codings and second category CodeableConcept
|
||||||
|
List<Coding> category2 = new ArrayList<>();
|
||||||
|
category2.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, "test-vital-signs", "test-vital-signs display"));
|
||||||
|
category2.add(new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals", "test-alt-vitals display"));
|
||||||
|
category2.add(new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-vitals", "test-2nd-alt-vitals display"));
|
||||||
|
List<CodeableConcept> categoryConcepts2 = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept2 = new CodeableConcept().setText("Test Codeable Concept Field for second category");
|
||||||
|
categoryCodeableConcept2.setCoding(category2);
|
||||||
|
categoryConcepts2.add(categoryCodeableConcept2);
|
||||||
|
|
||||||
|
ReferenceOrListParam subjectParams = new ReferenceOrListParam();
|
||||||
|
for (int patientCount = 0; patientCount < 10; patientCount++) {
|
||||||
|
|
||||||
|
String subjectId = String.valueOf(patientCount);
|
||||||
|
|
||||||
|
ReferenceParam subjectParam = new ReferenceParam("Patient", "", subjectId);
|
||||||
|
subjectParams.addOr(subjectParam);
|
||||||
|
|
||||||
|
for (int entryCount = 0; entryCount < 10; entryCount++) {
|
||||||
|
|
||||||
|
Observation observation = new Observation();
|
||||||
|
IdType observationId = new IdType("Observation", String.valueOf(entryCount + patientCount * 10), "1");
|
||||||
|
observation.setId(observationId);
|
||||||
|
Reference subject = new Reference(subjectId);
|
||||||
|
observation.setSubject(subject);
|
||||||
|
|
||||||
|
if (entryCount % 2 == 1) {
|
||||||
|
observation.setCategory(categoryConcepts1);
|
||||||
|
observation.setCode(codeableConceptField1);
|
||||||
|
} else {
|
||||||
|
observation.setCategory(categoryConcepts2);
|
||||||
|
observation.setCode(codeableConceptField2);
|
||||||
|
}
|
||||||
|
|
||||||
|
Calendar observationDate = new GregorianCalendar();
|
||||||
|
observationDate.add(Calendar.HOUR, -10 + entryCount);
|
||||||
|
Date effectiveDtm = observationDate.getTime();
|
||||||
|
observation.setEffective(new DateTimeType(effectiveDtm));
|
||||||
|
|
||||||
|
testObservationPersist.indexObservation(observation);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
multiSubjectParams = new ReferenceAndListParam().addAnd(subjectParams);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testDeleteObservation() {
|
||||||
|
indexMultipleObservations();
|
||||||
|
assertEquals(100, myResourceIndexedObservationLastNDao.count());
|
||||||
|
// Check that fifth observation for fifth patient has been indexed.
|
||||||
|
ObservationIndexedSearchParamLastNEntity observation = myResourceIndexedObservationLastNDao.findByIdentifier("55");
|
||||||
|
assertNotNull(observation);
|
||||||
|
|
||||||
|
SearchParameterMap searchParameterMap = new SearchParameterMap();
|
||||||
|
searchParameterMap.add(Observation.SP_SUBJECT, multiSubjectParams);
|
||||||
|
searchParameterMap.setLastNMax(10);
|
||||||
|
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
|
||||||
|
assertEquals(100, observationIdsOnly.size());
|
||||||
|
assertTrue(observationIdsOnly.contains("55"));
|
||||||
|
|
||||||
|
// Delete fifth observation for fifth patient.
|
||||||
|
ResourceTable entity = new ResourceTable();
|
||||||
|
entity.setId(55L);
|
||||||
|
entity.setResourceType("Observation");
|
||||||
|
entity.setVersion(0L);
|
||||||
|
|
||||||
|
testObservationPersist.deleteObservationIndex(entity);
|
||||||
|
|
||||||
|
// Confirm that observation was deleted.
|
||||||
|
assertEquals(99, myResourceIndexedObservationLastNDao.count());
|
||||||
|
observation = myResourceIndexedObservationLastNDao.findByIdentifier("55");
|
||||||
|
assertNull(observation);
|
||||||
|
|
||||||
|
observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
|
||||||
|
assertEquals(99, observationIdsOnly.size());
|
||||||
|
assertTrue(!observationIdsOnly.contains("55"));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testUpdateObservation() {
|
||||||
|
indexSingleObservation();
|
||||||
|
ObservationIndexedSearchParamLastNEntity observationIndexEntity = myResourceIndexedObservationLastNDao.findAll().get(0);
|
||||||
|
assertEquals(SINGLE_OBSERVATION_PID, observationIndexEntity.getIdentifier());
|
||||||
|
assertEquals(SINGLE_SUBJECT_ID, observationIndexEntity.getSubject());
|
||||||
|
assertEquals(SINGLE_EFFECTIVEDTM, observationIndexEntity.getEffectiveDtm());
|
||||||
|
|
||||||
|
SearchParameterMap searchParameterMap = new SearchParameterMap();
|
||||||
|
ReferenceParam subjectParam = new ReferenceParam("Patient", "", SINGLE_SUBJECT_ID);
|
||||||
|
searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
|
||||||
|
TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
|
||||||
|
searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
|
||||||
|
TokenParam codeParam = new TokenParam(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE);
|
||||||
|
searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));
|
||||||
|
searchParameterMap.setLastNMax(10);
|
||||||
|
|
||||||
|
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
|
||||||
|
assertEquals(1, observationIdsOnly.size());
|
||||||
|
assertTrue(observationIdsOnly.contains(SINGLE_OBSERVATION_PID));
|
||||||
|
|
||||||
|
// Update the Observation with a new Subject and effective date:
|
||||||
|
Observation updatedObservation = new Observation();
|
||||||
|
IdType observationId = new IdType("Observation", observationIndexEntity.getIdentifier(), "2");
|
||||||
|
updatedObservation.setId(observationId);
|
||||||
|
Reference subjectId = new Reference("1234");
|
||||||
|
updatedObservation.setSubject(subjectId);
|
||||||
|
DateTimeType newEffectiveDtm = new DateTimeType(new Date());
|
||||||
|
updatedObservation.setEffective(newEffectiveDtm);
|
||||||
|
updatedObservation.setCategory(getCategoryCode());
|
||||||
|
updatedObservation.setCode(getObservationCode());
|
||||||
|
|
||||||
|
testObservationPersist.indexObservation(updatedObservation);
|
||||||
|
|
||||||
|
ObservationIndexedSearchParamLastNEntity updatedObservationEntity = myResourceIndexedObservationLastNDao.findByIdentifier(SINGLE_OBSERVATION_PID);
|
||||||
|
assertEquals("1234", updatedObservationEntity.getSubject());
|
||||||
|
assertEquals(newEffectiveDtm.getValue(), updatedObservationEntity.getEffectiveDtm());
|
||||||
|
|
||||||
|
// Repeat earlier Elasticsearch query. This time, should return no matches.
|
||||||
|
observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
|
||||||
|
assertEquals(0, observationIdsOnly.size());
|
||||||
|
|
||||||
|
// Try again with the new patient ID.
|
||||||
|
searchParameterMap = new SearchParameterMap();
|
||||||
|
subjectParam = new ReferenceParam("Patient", "", "1234");
|
||||||
|
searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
|
||||||
|
searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
|
||||||
|
searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));
|
||||||
|
searchParameterMap.setLastNMax(10);
|
||||||
|
observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
|
||||||
|
|
||||||
|
// Should see the observation returned now.
|
||||||
|
assertEquals(1, observationIdsOnly.size());
|
||||||
|
assertTrue(observationIdsOnly.contains(SINGLE_OBSERVATION_PID));
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testSampleBundleInTransaction() throws IOException {
|
||||||
|
FhirContext myFhirCtx = FhirContext.forR4();
|
||||||
|
|
||||||
|
PathMatchingResourcePatternResolver provider = new PathMatchingResourcePatternResolver();
|
||||||
|
final Resource[] bundleResources;
|
||||||
|
bundleResources = provider.getResources("lastntestbundle.json");
|
||||||
|
|
||||||
|
AtomicInteger index = new AtomicInteger();
|
||||||
|
|
||||||
|
Arrays.stream(bundleResources).forEach(
|
||||||
|
resource -> {
|
||||||
|
index.incrementAndGet();
|
||||||
|
|
||||||
|
InputStream resIs = null;
|
||||||
|
String nextBundleString;
|
||||||
|
try {
|
||||||
|
resIs = resource.getInputStream();
|
||||||
|
nextBundleString = IOUtils.toString(resIs, Charsets.UTF_8);
|
||||||
|
} catch (IOException e) {
|
||||||
|
return;
|
||||||
|
} finally {
|
||||||
|
try {
|
||||||
|
if (resIs != null) {
|
||||||
|
resIs.close();
|
||||||
|
}
|
||||||
|
} catch (final IOException ioe) {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
IParser parser = myFhirCtx.newJsonParser();
|
||||||
|
Bundle bundle = parser.parseResource(Bundle.class, nextBundleString);
|
||||||
|
|
||||||
|
myDao.transaction(null, bundle);
|
||||||
|
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
SearchParameterMap searchParameterMap = new SearchParameterMap();
|
||||||
|
|
||||||
|
// execute Observation ID search - Composite Aggregation
|
||||||
|
searchParameterMap.setLastNMax(1);
|
||||||
|
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap,myFhirCtx, 200);
|
||||||
|
|
||||||
|
assertEquals(20, observationIdsOnly.size());
|
||||||
|
|
||||||
|
searchParameterMap.setLastNMax(3);
|
||||||
|
observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirCtx, 200);
|
||||||
|
|
||||||
|
assertEquals(38, observationIdsOnly.size());
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,419 @@
package ca.uhn.fhir.jpa.search.lastn;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchConfig;
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.hl7.fhir.r4.model.Observation;
import org.junit.*;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;

import java.io.IOException;
import java.util.*;

import static org.junit.Assert.*;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = {TestElasticsearchConfig.class})
public class LastNElasticsearchSvcMultipleObservationsIT {

	@Autowired
	private ElasticsearchSvcImpl elasticsearchSvc;

	private static ObjectMapper ourMapperNonPrettyPrint;

	private final Map<String, Map<String, List<Date>>> createdPatientObservationMap = new HashMap<>();

	private FhirContext myFhirContext = FhirContext.forR4();

	@BeforeClass
	public static void beforeClass() {
		ourMapperNonPrettyPrint = new ObjectMapper();
		ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
		ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
		ourMapperNonPrettyPrint.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
	}

	@Before
	public void before() throws IOException {
		createMultiplePatientsAndObservations();
	}

	@After
	public void after() throws IOException {
		elasticsearchSvc.deleteAllDocuments(ElasticsearchSvcImpl.OBSERVATION_INDEX);
		elasticsearchSvc.deleteAllDocuments(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX);
	}
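	// Illustrative sketch (not part of the committed test): the ObjectMapper configured in beforeClass()
	// is what turns an ObservationJson into the JSON document that gets indexed. Assuming only the setters
	// and constants already used elsewhere in this class, a single document could be built and indexed like
	// this (writeValueAsString can throw, so real callers do this from a method that declares IOException):
	//
	//   ObservationJson example = new ObservationJson();
	//   example.setIdentifier("42");
	//   example.setSubject("7");
	//   example.setEffectiveDtm(new Date());
	//   String exampleDocument = ourMapperNonPrettyPrint.writeValueAsString(example);  // NON_NULL fields only, no pretty printing
	//   elasticsearchSvc.performIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX, "42", exampleDocument, ElasticsearchSvcImpl.OBSERVATION_DOCUMENT_TYPE);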
	@Test
	public void testLastNAllPatientsQuery() {

		// execute Observation ID search (Composite Aggregation) last 3 observations for each patient
		SearchParameterMap searchParameterMap = new SearchParameterMap();
		for (int patientId = 0; patientId < 10; patientId++) {
			ReferenceParam subjectParam = new ReferenceParam("Patient", "", String.valueOf(patientId));
			searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
		}
		searchParameterMap.setLastNMax(3);

		List<ObservationJson> observations = elasticsearchSvc.executeLastNWithAllFields(searchParameterMap, myFhirContext);

		assertEquals(60, observations.size());

		// Observation documents should be grouped by subject, then by observation code, and then sorted by effective date/time
		// within each observation code. Verify the grouping by creating a nested Map.
		Map<String, Map<String, List<Date>>> queriedPatientObservationMap = new HashMap<>();
		ObservationJson previousObservationJson = null;
		for (ObservationJson observationJson : observations) {
			assertNotNull(observationJson.getIdentifier());
			assertNotNull(observationJson.getSubject());
			assertNotNull(observationJson.getCode_concept_id());
			assertNotNull(observationJson.getEffectiveDtm());
			if (previousObservationJson == null) {
				ArrayList<Date> observationDates = new ArrayList<>();
				observationDates.add(observationJson.getEffectiveDtm());
				Map<String, List<Date>> codeObservationMap = new HashMap<>();
				codeObservationMap.put(observationJson.getCode_concept_id(), observationDates);
				queriedPatientObservationMap.put(observationJson.getSubject(), codeObservationMap);
			} else if (observationJson.getSubject().equals(previousObservationJson.getSubject())) {
				if (observationJson.getCode_concept_id().equals(previousObservationJson.getCode_concept_id())) {
					queriedPatientObservationMap.get(observationJson.getSubject()).get(observationJson.getCode_concept_id()).add(observationJson.getEffectiveDtm());
				} else {
					Map<String, List<Date>> codeObservationDateMap = queriedPatientObservationMap.get(observationJson.getSubject());
					// Ensure that code concept was not already retrieved out of order for this subject/patient.
					assertFalse(codeObservationDateMap.containsKey(observationJson.getCode_concept_id()));
					ArrayList<Date> observationDates = new ArrayList<>();
					observationDates.add(observationJson.getEffectiveDtm());
					codeObservationDateMap.put(observationJson.getCode_concept_id(), observationDates);
				}
			} else {
				// Ensure that subject/patient was not already retrieved out of order
				assertFalse(queriedPatientObservationMap.containsKey(observationJson.getSubject()));
				ArrayList<Date> observationDates = new ArrayList<>();
				observationDates.add(observationJson.getEffectiveDtm());
				Map<String, List<Date>> codeObservationMap = new HashMap<>();
				codeObservationMap.put(observationJson.getCode_concept_id(), observationDates);
				queriedPatientObservationMap.put(observationJson.getSubject(), codeObservationMap);
			}
			previousObservationJson = observationJson;
		}

		// Finally check that only the most recent effective date/time values were returned and in the correct order.
		for (String subjectId : queriedPatientObservationMap.keySet()) {
			Map<String, List<Date>> queriedObservationCodeMap = queriedPatientObservationMap.get(subjectId);
			Map<String, List<Date>> createdObservationCodeMap = createdPatientObservationMap.get(subjectId);
			for (String observationCode : queriedObservationCodeMap.keySet()) {
				List<Date> queriedObservationDates = queriedObservationCodeMap.get(observationCode);
				List<Date> createdObservationDates = createdObservationCodeMap.get(observationCode);
				for (int dateIdx = 0; dateIdx < queriedObservationDates.size(); dateIdx++) {
					assertEquals(createdObservationDates.get(dateIdx), queriedObservationDates.get(dateIdx));
				}
			}
		}

	}
	@Test
	public void testLastNMultiPatientMultiCodeHashMultiCategoryHash() {
		// Multiple Subject references
		SearchParameterMap searchParameterMap = new SearchParameterMap();
		ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", "3");
		ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", "5");
		searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2));
		TokenParam categoryParam1 = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
		TokenParam categoryParam2 = new TokenParam("http://mycodes.org/fhir/observation-category", "test-vital-signs");
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
		TokenParam codeParam1 = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
		TokenParam codeParam2 = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-2");
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));
		searchParameterMap.setLastNMax(100);

		List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);

		assertEquals(20, observations.size());

		// Repeat with multiple Patient parameters
		searchParameterMap = new SearchParameterMap();
		ReferenceParam patientParam1 = new ReferenceParam("Patient", "", "8");
		ReferenceParam patientParam2 = new ReferenceParam("Patient", "", "6");
		searchParameterMap.add(Observation.SP_PATIENT, buildReferenceAndListParam(patientParam1, patientParam2));
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));
		searchParameterMap.setLastNMax(100);

		observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);

		assertEquals(20, observations.size());

	}

	private ReferenceAndListParam buildReferenceAndListParam(ReferenceParam... theReference) {
		ReferenceOrListParam myReferenceOrListParam = new ReferenceOrListParam();
		for (ReferenceParam referenceParam : theReference) {
			myReferenceOrListParam.addOr(referenceParam);
		}
		return new ReferenceAndListParam().addAnd(myReferenceOrListParam);
	}

	private TokenAndListParam buildTokenAndListParam(TokenParam... theToken) {
		TokenOrListParam myTokenOrListParam = new TokenOrListParam();
		for (TokenParam tokenParam : theToken) {
			myTokenOrListParam.addOr(tokenParam);
		}
		return new TokenAndListParam().addAnd(myTokenOrListParam);
	}
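	// Illustrative sketch (not part of the committed test): the two helper methods above build HAPI's
	// standard "AND list of OR lists" parameter structure. Using only those helpers, the map below is
	// roughly equivalent to the REST query
	// Observation?subject=Patient/3,Patient/5&code=http://mycodes.org/fhir/observation-code|test-code-1
	//
	//   SearchParameterMap exampleMap = new SearchParameterMap();
	//   exampleMap.add(Observation.SP_SUBJECT,
	//      buildReferenceAndListParam(new ReferenceParam("Patient", "", "3"), new ReferenceParam("Patient", "", "5")));  // OR within a single AND clause
	//   exampleMap.add(Observation.SP_CODE,
	//      buildTokenAndListParam(new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1")));
	//   exampleMap.setLastNMax(3);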
	@Test
	public void testLastNCodeCodeOnlyCategoryCodeOnly() {
		// Include subject
		SearchParameterMap searchParameterMap = new SearchParameterMap();
		ReferenceParam subjectParam = new ReferenceParam("Patient", "", "3");
		searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
		TokenParam categoryParam = new TokenParam("test-heart-rate");
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
		TokenParam codeParam = new TokenParam("test-code-1");
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
		searchParameterMap.setLastNMax(100);

		List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);

		assertEquals(5, observations.size());

	}

	@Test
	public void testLastNCodeSystemOnlyCategorySystemOnly() {
		// Include subject and patient
		SearchParameterMap searchParameterMap = new SearchParameterMap();
		ReferenceParam subjectParam = new ReferenceParam("Patient", "", "3");
		searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
		TokenParam categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", null);
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
		TokenParam codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", null);
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
		searchParameterMap.setLastNMax(100);

		List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);

		assertEquals(10, observations.size());
	}

	@Test
	public void testLastNCodeCodeTextCategoryTextOnly() {
		SearchParameterMap searchParameterMap = new SearchParameterMap();
		ReferenceParam subjectParam = new ReferenceParam("Patient", "", "3");
		searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
		TokenParam categoryParam = new TokenParam("test-heart-rate display");
		categoryParam.setModifier(TokenParamModifier.TEXT);
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
		TokenParam codeParam = new TokenParam("test-code-1 display");
		codeParam.setModifier(TokenParamModifier.TEXT);
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
		searchParameterMap.setLastNMax(100);

		List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);

		assertEquals(5, observations.size());

	}
	@Test
	public void testLastNNoMatchQueries() {
		// Invalid Patient
		SearchParameterMap searchParameterMap = new SearchParameterMap();
		ReferenceParam patientParam = new ReferenceParam("Patient", "", "10");
		searchParameterMap.add(Observation.SP_PATIENT, buildReferenceAndListParam(patientParam));
		TokenParam categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
		TokenParam codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
		searchParameterMap.setLastNMax(100);

		List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);
		assertEquals(0, observations.size());

		// Invalid subject
		searchParameterMap = new SearchParameterMap();
		ReferenceParam subjectParam = new ReferenceParam("Patient", "", "10");
		searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
		categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
		codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
		searchParameterMap.setLastNMax(100);

		observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);
		assertEquals(0, observations.size());

		// Invalid observation code
		searchParameterMap = new SearchParameterMap();
		subjectParam = new ReferenceParam("Patient", "", "9");
		searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
		categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
		codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-999");
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
		searchParameterMap.setLastNMax(100);

		observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);
		assertEquals(0, observations.size());

		// Invalid category code
		searchParameterMap = new SearchParameterMap();
		subjectParam = new ReferenceParam("Patient", "", "9");
		searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
		categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-not-a-category");
		searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
		codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
		searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
		searchParameterMap.setLastNMax(100);

		observations = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);
		assertEquals(0, observations.size());

	}
	private void createMultiplePatientsAndObservations() throws IOException {
		// Create CodeableConcepts for two Codes, each with one coding.
		String codeableConceptId1 = UUID.randomUUID().toString();
		CodeableConcept codeableConceptField1 = new CodeableConcept().setText("Test Codeable Concept Field for First Code");
		codeableConceptField1.addCoding(new Coding("http://mycodes.org/fhir/observation-code", "test-code-1", "test-code-1 display"));
		CodeJson codeJson1 = new CodeJson(codeableConceptField1, codeableConceptId1);
		String codeJson1Document = ourMapperNonPrettyPrint.writeValueAsString(codeJson1);

		String codeableConceptId2 = UUID.randomUUID().toString();
		CodeableConcept codeableConceptField2 = new CodeableConcept().setText("Test Codeable Concept Field for Second Code");
		codeableConceptField2.addCoding(new Coding("http://mycodes.org/fhir/observation-code", "test-code-2", "test-code-2 display"));
		CodeJson codeJson2 = new CodeJson(codeableConceptField2, codeableConceptId2);
		String codeJson2Document = ourMapperNonPrettyPrint.writeValueAsString(codeJson2);

		// Create CodeableConcepts for two categories, each with three codings.
		List<Coding> category1 = new ArrayList<>();
		// Create three codings and first category CodeableConcept
		category1.add(new Coding("http://mycodes.org/fhir/observation-category", "test-heart-rate", "test-heart-rate display"));
		category1.add(new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-heart-rate", "test-alt-heart-rate display"));
		category1.add(new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-heart-rate", "test-2nd-alt-heart-rate display"));
		List<CodeableConcept> categoryConcepts1 = new ArrayList<>();
		CodeableConcept categoryCodeableConcept1 = new CodeableConcept().setText("Test Codeable Concept Field for first category");
		categoryCodeableConcept1.setCoding(category1);
		categoryConcepts1.add(categoryCodeableConcept1);
		// Create three codings and second category CodeableConcept
		List<Coding> category2 = new ArrayList<>();
		category2.add(new Coding("http://mycodes.org/fhir/observation-category", "test-vital-signs", "test-vital-signs display"));
		category2.add(new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals", "test-alt-vitals display"));
		category2.add(new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-vitals", "test-2nd-alt-vitals display"));
		List<CodeableConcept> categoryConcepts2 = new ArrayList<>();
		CodeableConcept categoryCodeableConcept2 = new CodeableConcept().setText("Test Codeable Concept Field for second category");
		categoryCodeableConcept2.setCoding(category2);
		categoryConcepts2.add(categoryCodeableConcept2);

		for (int patientCount = 0; patientCount < 10; patientCount++) {

			String subject = String.valueOf(patientCount);

			for (int entryCount = 0; entryCount < 10; entryCount++) {

				ObservationJson observationJson = new ObservationJson();
				String identifier = String.valueOf((entryCount + patientCount * 10));
				observationJson.setIdentifier(identifier);
				observationJson.setSubject(subject);

				if (entryCount % 2 == 1) {
					observationJson.setCategories(categoryConcepts1);
					observationJson.setCode(codeableConceptField1);
					observationJson.setCode_concept_id(codeableConceptId1);
					assertTrue(elasticsearchSvc.performIndex(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX, codeableConceptId1, codeJson1Document, ElasticsearchSvcImpl.CODE_DOCUMENT_TYPE));
				} else {
					observationJson.setCategories(categoryConcepts2);
					observationJson.setCode(codeableConceptField2);
					observationJson.setCode_concept_id(codeableConceptId2);
					assertTrue(elasticsearchSvc.performIndex(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX, codeableConceptId2, codeJson2Document, ElasticsearchSvcImpl.CODE_DOCUMENT_TYPE));
				}

				Calendar observationDate = new GregorianCalendar();
				observationDate.add(Calendar.HOUR, -10 + entryCount);
				Date effectiveDtm = observationDate.getTime();
				observationJson.setEffectiveDtm(effectiveDtm);

				String observationDocument = ourMapperNonPrettyPrint.writeValueAsString(observationJson);
				assertTrue(elasticsearchSvc.performIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX, identifier, observationDocument, ElasticsearchSvcImpl.OBSERVATION_DOCUMENT_TYPE));

				if (createdPatientObservationMap.containsKey(subject)) {
					Map<String, List<Date>> observationCodeMap = createdPatientObservationMap.get(subject);
					if (observationCodeMap.containsKey(observationJson.getCode_concept_id())) {
						List<Date> observationDates = observationCodeMap.get(observationJson.getCode_concept_id());
						// Want dates to be sorted in descending order
						observationDates.add(0, effectiveDtm);
						// Only keep the three most recent dates for later check.
						if (observationDates.size() > 3) {
							observationDates.remove(3);
						}
					} else {
						ArrayList<Date> observationDates = new ArrayList<>();
						observationDates.add(effectiveDtm);
						observationCodeMap.put(observationJson.getCode_concept_id(), observationDates);
					}
				} else {
					ArrayList<Date> observationDates = new ArrayList<>();
					observationDates.add(effectiveDtm);
					Map<String, List<Date>> codeObservationMap = new HashMap<>();
					codeObservationMap.put(observationJson.getCode_concept_id(), observationDates);
					createdPatientObservationMap.put(subject, codeObservationMap);
				}
			}
		}

		try {
			Thread.sleep(2000L);
		} catch (InterruptedException theE) {
			theE.printStackTrace();
		}

	}
	@Test
	public void testLastNNoParamsQuery() {
		SearchParameterMap searchParameterMap = new SearchParameterMap();
		searchParameterMap.setLastNMax(1);
		List<ObservationJson> observations = elasticsearchSvc.executeLastNWithAllFields(searchParameterMap, myFhirContext);

		assertEquals(2, observations.size());

		String observationCode1 = observations.get(0).getCode_coding_code_system_hash();
		String observationCode2 = observations.get(1).getCode_coding_code_system_hash();

		assertNotEquals(observationCode1, observationCode2);

	}

}
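The expected counts asserted above follow directly from the fixture built in createMultiplePatientsAndObservations(): ten patients, each with ten observations alternating between two observation codes, i.e. five observations per code per patient. As a worked check (not part of the committed sources):

    testLastNAllPatientsQuery:                    10 patients x 2 codes x lastNMax(3)  = 60 results
    testLastNMultiPatientMultiCodeHash...:         2 patients x 2 codes x 5 per code   = 20 results
    testLastNCodeCodeOnlyCategoryCodeOnly:         1 patient  x 1 code  x 5 per code   =  5 results
    testLastNCodeSystemOnlyCategorySystemOnly:     1 patient  x 2 codes x 5 per code   = 10 results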
@ -0,0 +1,312 @@
|
||||||
|
package ca.uhn.fhir.jpa.search.lastn;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
|
||||||
|
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchConfig;
|
||||||
|
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
|
||||||
|
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
|
||||||
|
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||||
|
import ca.uhn.fhir.model.dstu2.resource.Observation;
|
||||||
|
import ca.uhn.fhir.rest.param.ReferenceAndListParam;
|
||||||
|
import ca.uhn.fhir.rest.param.ReferenceOrListParam;
|
||||||
|
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||||
|
import ca.uhn.fhir.rest.param.TokenAndListParam;
|
||||||
|
import ca.uhn.fhir.rest.param.TokenOrListParam;
|
||||||
|
import ca.uhn.fhir.rest.param.TokenParam;
|
||||||
|
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||||
|
import com.fasterxml.jackson.core.JsonGenerator;
|
||||||
|
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||||
|
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||||
|
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||||
|
import org.hl7.fhir.r4.model.Coding;
|
||||||
|
import org.junit.After;
|
||||||
|
import org.junit.BeforeClass;
|
||||||
|
import org.junit.Test;
|
||||||
|
import org.junit.runner.RunWith;
|
||||||
|
import org.springframework.beans.factory.annotation.Autowired;
|
||||||
|
import org.springframework.test.context.ContextConfiguration;
|
||||||
|
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||||
|
|
||||||
|
import java.io.IOException;
|
||||||
|
import java.util.ArrayList;
|
||||||
|
import java.util.Date;
|
||||||
|
import java.util.List;
|
||||||
|
import java.util.UUID;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
|
@RunWith(SpringJUnit4ClassRunner.class)
|
||||||
|
@ContextConfiguration(classes = {TestElasticsearchConfig.class})
|
||||||
|
public class LastNElasticsearchSvcSingleObservationIT {
|
||||||
|
|
||||||
|
@Autowired
|
||||||
|
ElasticsearchSvcImpl elasticsearchSvc;
|
||||||
|
|
||||||
|
static ObjectMapper ourMapperNonPrettyPrint;
|
||||||
|
|
||||||
|
final String RESOURCEPID = "123";
|
||||||
|
final String SUBJECTID = "Patient/4567";
|
||||||
|
final Date EFFECTIVEDTM = new Date();
|
||||||
|
final String FIRSTCATEGORYTEXT = "Test Codeable Concept Field for first category";
|
||||||
|
final String CATEGORYFIRSTCODINGSYSTEM = "http://mycodes.org/fhir/observation-category";
|
||||||
|
final String CATEGORYSECONDCODINGSYSTEM = "http://myalternatecodes.org/fhir/observation-category";
|
||||||
|
final String CATEGORYTHIRDCODINGSYSTEM = "http://mysecondaltcodes.org/fhir/observation-category";
|
||||||
|
final String FIRSTCATEGORYFIRSTCODINGCODE = "test-heart-rate";
|
||||||
|
final String FIRSTCATEGORYFIRSTCODINGDISPLAY = "test-heart-rate display";
|
||||||
|
final String FIRSTCATEGORYSECONDCODINGCODE = "test-alt-heart-rate";
|
||||||
|
final String FIRSTCATEGORYSECONDCODINGDISPLAY = "test-alt-heart-rate display";
|
||||||
|
final String FIRSTCATEGORYTHIRDCODINGCODE = "test-2nd-alt-heart-rate";
|
||||||
|
final String FIRSTCATEGORYTHIRDCODINGDISPLAY = "test-2nd-alt-heart-rate display";
|
||||||
|
final String SECONDCATEGORYTEXT = "Test Codeable Concept Field for for second category";
|
||||||
|
final String SECONDCATEGORYFIRSTCODINGCODE = "test-vital-signs";
|
||||||
|
final String SECONDCATEGORYFIRSTCODINGDISPLAY = "test-vital-signs display";
|
||||||
|
final String SECONDCATEGORYSECONDCODINGCODE = "test-alt-vitals";
|
||||||
|
final String SECONDCATEGORYSECONDCODINGDISPLAY = "test-alt-vitals display";
|
||||||
|
final String SECONDCATEGORYTHIRDCODINGCODE = "test-2nd-alt-vitals";
|
||||||
|
final String SECONDCATEGORYTHIRDCODINGDISPLAY = "test-2nd-alt-vitals display";
|
||||||
|
final String THIRDCATEGORYTEXT = "Test Codeable Concept Field for third category";
|
||||||
|
final String THIRDCATEGORYFIRSTCODINGCODE = "test-vital-panel";
|
||||||
|
final String THIRDCATEGORYFIRSTCODINGDISPLAY = "test-vitals-panel display";
|
||||||
|
final String THIRDCATEGORYSECONDCODINGCODE = "test-alt-vitals-panel";
|
||||||
|
final String THIRDCATEGORYSECONDCODINGDISPLAY = "test-alt-vitals display";
|
||||||
|
final String THIRDCATEGORYTHIRDCODINGCODE = "test-2nd-alt-vitals-panel";
|
||||||
|
final String THIRDCATEGORYTHIRDCODINGDISPLAY = "test-2nd-alt-vitals-panel display";
|
||||||
|
|
||||||
|
final String OBSERVATIONSINGLECODEID = UUID.randomUUID().toString();
|
||||||
|
final String OBSERVATIONCODETEXT = "Test Codeable Concept Field for Code";
|
||||||
|
final String CODEFIRSTCODINGSYSTEM = "http://mycodes.org/fhir/observation-code";
|
||||||
|
final String CODEFIRSTCODINGCODE = "test-code";
|
||||||
|
final String CODEFIRSTCODINGDISPLAY = "test-code display";
|
||||||
|
|
||||||
|
final FhirContext myFhirContext = FhirContext.forR4();
|
||||||
|
|
||||||
|
@BeforeClass
|
||||||
|
public static void beforeClass() {
|
||||||
|
ourMapperNonPrettyPrint = new ObjectMapper();
|
||||||
|
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||||
|
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
|
||||||
|
ourMapperNonPrettyPrint.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
@After
|
||||||
|
public void after() throws IOException {
|
||||||
|
elasticsearchSvc.deleteAllDocuments(ElasticsearchSvcImpl.OBSERVATION_INDEX);
|
||||||
|
elasticsearchSvc.deleteAllDocuments(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Test
|
||||||
|
public void testSingleObservationQuery() throws IOException {
|
||||||
|
|
||||||
|
createSingleObservation();
|
||||||
|
|
||||||
|
SearchParameterMap searchParameterMap = new SearchParameterMap();
|
||||||
|
ReferenceParam subjectParam = new ReferenceParam("Patient", "", SUBJECTID);
|
||||||
|
searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
|
||||||
|
TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
|
||||||
|
searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
|
||||||
|
TokenParam codeParam = new TokenParam(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE);
|
||||||
|
searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));
|
||||||
|
|
||||||
|
searchParameterMap.setLastNMax(3);
|
||||||
|
|
||||||
|
// execute Observation ID search
|
||||||
|
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, myFhirContext, 100);
|
||||||
|
|
||||||
|
assertEquals(1, observationIdsOnly.size());
|
||||||
|
assertEquals(RESOURCEPID, observationIdsOnly.get(0));
|
||||||
|
|
||||||
|
// execute Observation search for all search fields
|
||||||
|
List<ObservationJson> observations = elasticsearchSvc.executeLastNWithAllFields(searchParameterMap, myFhirContext);
|
||||||
|
|
||||||
|
validateFullObservationSearch(observations);
|
||||||
|
}
|
||||||
|
|
||||||
|
private void validateFullObservationSearch(List<ObservationJson> observations) throws IOException {
|
||||||
|
|
||||||
|
assertEquals(1, observations.size());
|
||||||
|
ObservationJson observation = observations.get(0);
|
||||||
|
assertEquals(RESOURCEPID, observation.getIdentifier());
|
||||||
|
|
||||||
|
assertEquals(SUBJECTID, observation.getSubject());
|
||||||
|
assertEquals(RESOURCEPID, observation.getIdentifier());
|
||||||
|
assertEquals(EFFECTIVEDTM, observation.getEffectiveDtm());
|
||||||
|
assertEquals(OBSERVATIONSINGLECODEID, observation.getCode_concept_id());
|
||||||
|
|
||||||
|
List<String> category_concept_text_values = observation.getCategory_concept_text();
|
||||||
|
assertEquals(3, category_concept_text_values.size());
|
||||||
|
assertEquals(FIRSTCATEGORYTEXT, category_concept_text_values.get(0));
|
||||||
|
assertEquals(SECONDCATEGORYTEXT, category_concept_text_values.get(1));
|
||||||
|
assertEquals(THIRDCATEGORYTEXT, category_concept_text_values.get(2));
|
||||||
|
|
||||||
|
List<List<String>> category_codings_systems = observation.getCategory_coding_system();
|
||||||
|
assertEquals(3, category_codings_systems.size());
|
||||||
|
List<String> category_coding_systems = category_codings_systems.get(0);
|
||||||
|
assertEquals(3, category_coding_systems.size());
|
||||||
|
assertEquals(CATEGORYFIRSTCODINGSYSTEM, category_coding_systems.get(0));
|
||||||
|
assertEquals(CATEGORYSECONDCODINGSYSTEM, category_coding_systems.get(1));
|
||||||
|
assertEquals(CATEGORYTHIRDCODINGSYSTEM, category_coding_systems.get(2));
|
||||||
|
category_coding_systems = category_codings_systems.get(1);
|
||||||
|
assertEquals(3, category_coding_systems.size());
|
||||||
|
assertEquals(CATEGORYFIRSTCODINGSYSTEM, category_coding_systems.get(0));
|
||||||
|
assertEquals(CATEGORYSECONDCODINGSYSTEM, category_coding_systems.get(1));
|
||||||
|
assertEquals(CATEGORYTHIRDCODINGSYSTEM, category_coding_systems.get(2));
|
||||||
|
category_coding_systems = category_codings_systems.get(2);
|
||||||
|
assertEquals(3, category_coding_systems.size());
|
||||||
|
assertEquals(CATEGORYFIRSTCODINGSYSTEM, category_coding_systems.get(0));
|
||||||
|
assertEquals(CATEGORYSECONDCODINGSYSTEM, category_coding_systems.get(1));
|
||||||
|
assertEquals(CATEGORYTHIRDCODINGSYSTEM, category_coding_systems.get(2));
|
||||||
|
|
||||||
|
List<List<String>> category_codings_codes = observation.getCategory_coding_code();
|
||||||
|
assertEquals(3, category_codings_codes.size());
|
||||||
|
List<String> category_coding_codes = category_codings_codes.get(0);
|
||||||
|
assertEquals(3, category_coding_codes.size());
|
||||||
|
assertEquals(FIRSTCATEGORYFIRSTCODINGCODE, category_coding_codes.get(0));
|
||||||
|
assertEquals(FIRSTCATEGORYSECONDCODINGCODE, category_coding_codes.get(1));
|
||||||
|
assertEquals(FIRSTCATEGORYTHIRDCODINGCODE, category_coding_codes.get(2));
|
||||||
|
category_coding_codes = category_codings_codes.get(1);
|
||||||
|
assertEquals(3, category_coding_codes.size());
|
||||||
|
assertEquals(SECONDCATEGORYFIRSTCODINGCODE, category_coding_codes.get(0));
|
||||||
|
assertEquals(SECONDCATEGORYSECONDCODINGCODE, category_coding_codes.get(1));
|
||||||
|
assertEquals(SECONDCATEGORYTHIRDCODINGCODE, category_coding_codes.get(2));
|
||||||
|
category_coding_codes = category_codings_codes.get(2);
|
||||||
|
assertEquals(3, category_coding_codes.size());
|
||||||
|
assertEquals(THIRDCATEGORYFIRSTCODINGCODE, category_coding_codes.get(0));
|
||||||
|
assertEquals(THIRDCATEGORYSECONDCODINGCODE, category_coding_codes.get(1));
|
||||||
|
assertEquals(THIRDCATEGORYTHIRDCODINGCODE, category_coding_codes.get(2));
|
||||||
|
|
||||||
|
List<List<String>> category_codings_displays = observation.getCategory_coding_display();
|
||||||
|
assertEquals(3, category_codings_displays.size());
|
||||||
|
List<String> category_coding_displays = category_codings_displays.get(0);
|
||||||
|
assertEquals(FIRSTCATEGORYFIRSTCODINGDISPLAY, category_coding_displays.get(0));
|
||||||
|
assertEquals(FIRSTCATEGORYSECONDCODINGDISPLAY, category_coding_displays.get(1));
|
||||||
|
assertEquals(FIRSTCATEGORYTHIRDCODINGDISPLAY, category_coding_displays.get(2));
|
||||||
|
category_coding_displays = category_codings_displays.get(1);
|
||||||
|
assertEquals(3, category_coding_displays.size());
|
||||||
|
assertEquals(SECONDCATEGORYFIRSTCODINGDISPLAY, category_coding_displays.get(0));
|
||||||
|
assertEquals(SECONDCATEGORYSECONDCODINGDISPLAY, category_coding_displays.get(1));
|
||||||
|
assertEquals(SECONDCATEGORYTHIRDCODINGDISPLAY, category_coding_displays.get(2));
|
||||||
|
category_coding_displays = category_codings_displays.get(2);
|
||||||
|
assertEquals(3, category_coding_displays.size());
|
||||||
|
assertEquals(THIRDCATEGORYFIRSTCODINGDISPLAY, category_coding_displays.get(0));
|
||||||
|
assertEquals(THIRDCATEGORYSECONDCODINGDISPLAY, category_coding_displays.get(1));
|
||||||
|
assertEquals(THIRDCATEGORYTHIRDCODINGDISPLAY, category_coding_displays.get(2));
|
||||||
|
|
||||||
|
List<List<String>> category_codings_code_system_hashes = observation.getCategory_coding_code_system_hash();
|
||||||
|
assertEquals(3, category_codings_code_system_hashes.size());
|
||||||
|
List<String> category_coding_code_system_hashes = category_codings_code_system_hashes.get(0);
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE)), category_coding_code_system_hashes.get(0));
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYSECONDCODINGSYSTEM, FIRSTCATEGORYSECONDCODINGCODE)), category_coding_code_system_hashes.get(1));
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYTHIRDCODINGSYSTEM, FIRSTCATEGORYTHIRDCODINGCODE)), category_coding_code_system_hashes.get(2));
|
||||||
|
category_coding_code_system_hashes = category_codings_code_system_hashes.get(1);
|
||||||
|
assertEquals(3, category_coding_code_system_hashes.size());
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYFIRSTCODINGSYSTEM, SECONDCATEGORYFIRSTCODINGCODE)), category_coding_code_system_hashes.get(0));
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYSECONDCODINGSYSTEM, SECONDCATEGORYSECONDCODINGCODE)), category_coding_code_system_hashes.get(1));
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYTHIRDCODINGSYSTEM, SECONDCATEGORYTHIRDCODINGCODE)), category_coding_code_system_hashes.get(2));
|
||||||
|
category_coding_code_system_hashes = category_codings_code_system_hashes.get(2);
|
||||||
|
assertEquals(3, category_coding_code_system_hashes.size());
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYFIRSTCODINGSYSTEM, THIRDCATEGORYFIRSTCODINGCODE)), category_coding_code_system_hashes.get(0));
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYSECONDCODINGSYSTEM, THIRDCATEGORYSECONDCODINGCODE)), category_coding_code_system_hashes.get(1));
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CATEGORYTHIRDCODINGSYSTEM, THIRDCATEGORYTHIRDCODINGCODE)), category_coding_code_system_hashes.get(2));
|
||||||
|
|
||||||
|
String code_concept_text_values = observation.getCode_concept_text();
|
||||||
|
assertEquals(OBSERVATIONCODETEXT, code_concept_text_values);
|
||||||
|
|
||||||
|
String code_coding_systems = observation.getCode_coding_system();
|
||||||
|
assertEquals(CODEFIRSTCODINGSYSTEM, code_coding_systems);
|
||||||
|
|
||||||
|
String code_coding_codes = observation.getCode_coding_code();
|
||||||
|
assertEquals(CODEFIRSTCODINGCODE, code_coding_codes);
|
||||||
|
|
||||||
|
String code_coding_display = observation.getCode_coding_display();
|
||||||
|
assertEquals(CODEFIRSTCODINGDISPLAY, code_coding_display);
|
||||||
|
|
||||||
|
String code_coding_code_system_hash = observation.getCode_coding_code_system_hash();
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE)), code_coding_code_system_hash);
|
||||||
|
|
||||||
|
// Retrieve all Observation codes
|
||||||
|
List<CodeJson> codes = elasticsearchSvc.queryAllIndexedObservationCodes();
|
||||||
|
assertEquals(1, codes.size());
|
||||||
|
CodeJson persistedObservationCode = codes.get(0);
|
||||||
|
|
||||||
|
String persistedCodeConceptID = persistedObservationCode.getCodeableConceptId();
|
||||||
|
assertEquals(OBSERVATIONSINGLECODEID, persistedCodeConceptID);
|
||||||
|
String persistedCodeConceptText = persistedObservationCode.getCodeableConceptText();
|
||||||
|
assertEquals(OBSERVATIONCODETEXT, persistedCodeConceptText);
|
||||||
|
|
||||||
|
List<String> persistedCodeCodingSystems = persistedObservationCode.getCoding_system();
|
||||||
|
assertEquals(1, persistedCodeCodingSystems.size());
|
||||||
|
assertEquals(CODEFIRSTCODINGSYSTEM, persistedCodeCodingSystems.get(0));
|
||||||
|
|
||||||
|
List<String> persistedCodeCodingCodes = persistedObservationCode.getCoding_code();
|
||||||
|
assertEquals(1, persistedCodeCodingCodes.size());
|
||||||
|
assertEquals(CODEFIRSTCODINGCODE, persistedCodeCodingCodes.get(0));
|
||||||
|
|
||||||
|
List<String> persistedCodeCodingDisplays = persistedObservationCode.getCoding_display();
|
||||||
|
assertEquals(1, persistedCodeCodingDisplays.size());
|
||||||
|
assertEquals(CODEFIRSTCODINGDISPLAY, persistedCodeCodingDisplays.get(0));
|
||||||
|
|
||||||
|
List<String> persistedCodeCodingCodeSystemHashes = persistedObservationCode.getCoding_code_system_hash();
|
||||||
|
assertEquals(1, persistedCodeCodingCodeSystemHashes.size());
|
||||||
|
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE)), persistedCodeCodingCodeSystemHashes.get(0));
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private void createSingleObservation() throws IOException {
|
||||||
|
ObservationJson indexedObservation = new ObservationJson();
|
||||||
|
indexedObservation.setIdentifier(RESOURCEPID);
|
||||||
|
indexedObservation.setSubject(SUBJECTID);
|
||||||
|
indexedObservation.setEffectiveDtm(EFFECTIVEDTM);
|
||||||
|
|
||||||
|
// Add three CodeableConcepts for category
|
||||||
|
List<CodeableConcept> categoryConcepts = new ArrayList<>();
|
||||||
|
// Create three codings and first category CodeableConcept
|
||||||
|
List<Coding> category1 = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept1 = new CodeableConcept().setText(FIRSTCATEGORYTEXT);
|
||||||
|
category1.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE, FIRSTCATEGORYFIRSTCODINGDISPLAY));
|
||||||
|
category1.add(new Coding(CATEGORYSECONDCODINGSYSTEM, FIRSTCATEGORYSECONDCODINGCODE, FIRSTCATEGORYSECONDCODINGDISPLAY));
|
||||||
|
category1.add(new Coding(CATEGORYTHIRDCODINGSYSTEM, FIRSTCATEGORYTHIRDCODINGCODE, FIRSTCATEGORYTHIRDCODINGDISPLAY));
|
||||||
|
categoryCodeableConcept1.setCoding(category1);
|
||||||
|
categoryConcepts.add(categoryCodeableConcept1);
|
||||||
|
// Create three codings and second category CodeableConcept
|
||||||
|
List<Coding> category2 = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept2 = new CodeableConcept().setText(SECONDCATEGORYTEXT);
|
||||||
|
category2.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, SECONDCATEGORYFIRSTCODINGCODE, SECONDCATEGORYFIRSTCODINGDISPLAY));
|
||||||
|
category2.add(new Coding(CATEGORYSECONDCODINGSYSTEM, SECONDCATEGORYSECONDCODINGCODE, SECONDCATEGORYSECONDCODINGDISPLAY));
|
||||||
|
category2.add(new Coding(CATEGORYTHIRDCODINGSYSTEM, SECONDCATEGORYTHIRDCODINGCODE, SECONDCATEGORYTHIRDCODINGDISPLAY));
|
||||||
|
categoryCodeableConcept2.setCoding(category2);
|
||||||
|
categoryConcepts.add(categoryCodeableConcept2);
|
||||||
|
// Create three codings and third category CodeableConcept
|
||||||
|
List<Coding> category3 = new ArrayList<>();
|
||||||
|
CodeableConcept categoryCodeableConcept3 = new CodeableConcept().setText(THIRDCATEGORYTEXT);
|
||||||
|
category3.add(new Coding(CATEGORYFIRSTCODINGSYSTEM, THIRDCATEGORYFIRSTCODINGCODE, THIRDCATEGORYFIRSTCODINGDISPLAY));
|
||||||
|
category3.add(new Coding(CATEGORYSECONDCODINGSYSTEM, THIRDCATEGORYSECONDCODINGCODE, THIRDCATEGORYSECONDCODINGDISPLAY));
|
||||||
|
category3.add(new Coding(CATEGORYTHIRDCODINGSYSTEM, THIRDCATEGORYTHIRDCODINGCODE, THIRDCATEGORYTHIRDCODINGDISPLAY));
|
||||||
|
categoryCodeableConcept3.setCoding(category3);
|
||||||
|
categoryConcepts.add(categoryCodeableConcept3);
|
||||||
|
indexedObservation.setCategories(categoryConcepts);
|
||||||
|
|
||||||
|
// Create CodeableConcept for Code with three codings.
|
||||||
|
indexedObservation.setCode_concept_id(OBSERVATIONSINGLECODEID);
|
||||||
|
CodeableConcept codeableConceptField = new CodeableConcept().setText(OBSERVATIONCODETEXT);
|
||||||
|
codeableConceptField.addCoding(new Coding(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE, CODEFIRSTCODINGDISPLAY));
|
||||||
|
indexedObservation.setCode(codeableConceptField);
|
||||||
|
|
||||||
|
String observationDocument = ourMapperNonPrettyPrint.writeValueAsString(indexedObservation);
|
||||||
|
assertTrue(elasticsearchSvc.performIndex(ElasticsearchSvcImpl.OBSERVATION_INDEX, RESOURCEPID, observationDocument, ElasticsearchSvcImpl.OBSERVATION_DOCUMENT_TYPE));
|
||||||
|
|
||||||
|
CodeJson observationCode = new CodeJson(codeableConceptField, OBSERVATIONSINGLECODEID);
|
||||||
|
String codeDocument = ourMapperNonPrettyPrint.writeValueAsString(observationCode);
|
||||||
|
assertTrue(elasticsearchSvc.performIndex(ElasticsearchSvcImpl.OBSERVATION_CODE_INDEX, OBSERVATIONSINGLECODEID, codeDocument, ElasticsearchSvcImpl.CODE_DOCUMENT_TYPE));
|
||||||
|
|
||||||
|
try {
|
||||||
|
Thread.sleep(1000L);
|
||||||
|
} catch (InterruptedException theE) {
|
||||||
|
theE.printStackTrace();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,50 @@
package ca.uhn.fhir.jpa.search.lastn.config;

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic;
import pl.allegro.tech.embeddedelasticsearch.PopularProperties;

import javax.annotation.PreDestroy;
import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

@Configuration
public class TestElasticsearchConfig {

	private final String elasticsearchHost = "localhost";
	private final String elasticsearchUserId = "";
	private final String elasticsearchPassword = "";

	private static final String ELASTIC_VERSION = "6.5.4";

	@Bean()
	public ElasticsearchSvcImpl myElasticsearchSvc() throws IOException {
		int elasticsearchPort = embeddedElasticSearch().getHttpPort();
		return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
	}

	@Bean
	public EmbeddedElastic embeddedElasticSearch() {
		EmbeddedElastic embeddedElastic = null;
		try {
			embeddedElastic = EmbeddedElastic.builder()
				.withElasticVersion(ELASTIC_VERSION)
				.withSetting(PopularProperties.TRANSPORT_TCP_PORT, 0)
				.withSetting(PopularProperties.HTTP_PORT, 0)
				.withSetting(PopularProperties.CLUSTER_NAME, UUID.randomUUID())
				.withStartTimeout(60, TimeUnit.SECONDS)
				.build()
				.start();
		} catch (IOException | InterruptedException e) {
			throw new ConfigurationException(e);
		}

		return embeddedElastic;
	}

}
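As a usage sketch (an illustration, not part of this commit), the same wiring can be exercised without a Spring test context by calling the factory methods directly; the stop() call at the end is an assumption about EmbeddedElastic's shutdown API:

    TestElasticsearchConfig config = new TestElasticsearchConfig();
    EmbeddedElastic embeddedElastic = config.embeddedElasticSearch();   // starts Elasticsearch 6.5.4 on random ports
    ElasticsearchSvcImpl svc = new ElasticsearchSvcImpl("localhost", embeddedElastic.getHttpPort(), "", "");
    // ... index and query test documents against svc ...
    embeddedElastic.stop();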
@ -104,9 +104,6 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
		empiLink.addIndex("20200517.5", "IDX_EMPI_PERSON_TGT").unique(true).withColumns("PERSON_PID", "TARGET_PID");

		// TRM_CONCEPT_PROPERTY
		// version.onTable("TRM_CONCEPT_PROPERTY").addIndex("20200523.1", "IDX_CONCEPTPROP_CONCEPTPID").unique(false).withColumns("CONCEPT_PID");

	}

	protected void init500() { // 20200218 - 20200519
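For context, index entries like the IDX_EMPI_PERSON_TGT line above come from the migration DSL visible in the commented-out example: a table builder is obtained from the version object and the index is described fluently. A minimal sketch using that same chain (the table name is a placeholder, since it is not shown in this hunk):

    version.onTable("SOME_TABLE")
       .addIndex("20200517.5", "IDX_EMPI_PERSON_TGT")
       .unique(true)
       .withColumns("PERSON_PID", "TARGET_PID");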
@ -20,13 +20,17 @@ package ca.uhn.fhir.jpa.model.entity;
|
||||||
* #L%
|
* #L%
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
import ca.uhn.fhir.model.api.TemporalPrecisionEnum;
|
||||||
import com.google.common.annotations.VisibleForTesting;
|
import com.google.common.annotations.VisibleForTesting;
|
||||||
import org.apache.commons.lang3.ObjectUtils;
|
import org.apache.commons.lang3.ObjectUtils;
|
||||||
import org.apache.commons.lang3.Validate;
|
import org.apache.commons.lang3.Validate;
|
||||||
import org.hl7.fhir.dstu2.model.Subscription;
|
import org.hl7.fhir.dstu2.model.Subscription;
|
||||||
|
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||||
|
import org.hl7.fhir.r4.model.DateTimeType;
|
||||||
|
|
||||||
import java.util.Arrays;
|
import java.util.Arrays;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
|
import java.util.Date;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
|
|
||||||
|
@ -50,6 +54,21 @@ public class ModelConfig {
|
||||||
"http://hl7.org/fhir/StructureDefinition/*")));
|
"http://hl7.org/fhir/StructureDefinition/*")));
|
||||||
|
|
||||||
public static final String DEFAULT_WEBSOCKET_CONTEXT_PATH = "/websocket";
|
public static final String DEFAULT_WEBSOCKET_CONTEXT_PATH = "/websocket";
|
||||||
|
|
||||||
|
/*
|
||||||
|
* <p>
|
||||||
|
* Note the following database documented limitations:
|
||||||
|
* <ul>
|
||||||
|
* <li>JDBC Timestamp Datatype Low Value -4713 and High Value 9999</li>
|
||||||
|
* <li>MySQL 8: the range for DATETIME values is '1000-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999`</li>
|
||||||
|
* <li>Postgresql 12: Timestamp [without time zone] Low Value 4713 BC and High Value 294276 AD</li>
|
||||||
|
* <li>Oracle: Timestamp Low Value 4712 BC and High Value 9999 CE</li>
|
||||||
|
* <li>H2: datetime2 Low Value -4713 and High Value 9999</li>
|
||||||
|
* </ul>
|
||||||
|
* </p>
|
||||||
|
*/
|
||||||
|
protected static final String DEFAULT_PERIOD_INDEX_START_OF_TIME = "1001-01-01";
|
||||||
|
protected static final String DEFAULT_PERIOD_INDEX_END_OF_TIME = "9000-01-01";
|
||||||
/**
|
/**
|
||||||
* update setter javadoc if default changes
|
* update setter javadoc if default changes
|
||||||
*/
|
*/
|
||||||
|
@ -67,11 +86,15 @@ public class ModelConfig {
|
||||||
private boolean myUseOrdinalDatesForDayPrecisionSearches = true;
|
private boolean myUseOrdinalDatesForDayPrecisionSearches = true;
|
||||||
private boolean mySuppressStringIndexingInTokens = false;
|
private boolean mySuppressStringIndexingInTokens = false;
|
||||||
|
|
||||||
|
private IPrimitiveType<Date> myPeriodIndexStartOfTime;
|
||||||
|
private IPrimitiveType<Date> myPeriodIndexEndOfTime;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
*/
|
*/
|
||||||
public ModelConfig() {
|
public ModelConfig() {
|
||||||
super();
|
setPeriodIndexStartOfTime(new DateTimeType(DEFAULT_PERIOD_INDEX_START_OF_TIME));
|
||||||
|
setPeriodIndexEndOfTime(new DateTimeType(DEFAULT_PERIOD_INDEX_END_OF_TIME));
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -373,8 +396,8 @@ public class ModelConfig {
|
||||||
/**
|
/**
|
||||||
* <p>
|
* <p>
|
||||||
* Should searches use the integer field {@code SP_VALUE_LOW_DATE_ORDINAL} and {@code SP_VALUE_HIGH_DATE_ORDINAL} in
|
* Should searches use the integer field {@code SP_VALUE_LOW_DATE_ORDINAL} and {@code SP_VALUE_HIGH_DATE_ORDINAL} in
|
||||||
* {@link ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate} when resolving searches where all predicates are using
|
* {@link ResourceIndexedSearchParamDate} when resolving searches where all predicates are using
|
||||||
* precision of {@link ca.uhn.fhir.model.api.TemporalPrecisionEnum#DAY}.
|
* precision of {@link TemporalPrecisionEnum#DAY}.
|
||||||
* <p>
|
* <p>
|
||||||
* For example, if enabled, the search of {@code Observation?date=2020-02-25} will cause the date to be collapsed down to an
|
* For example, if enabled, the search of {@code Observation?date=2020-02-25} will cause the date to be collapsed down to an
|
||||||
* integer representing the ordinal date {@code 20200225}. It would then be compared against {@link ResourceIndexedSearchParamDate#getValueLowDateOrdinal()}
|
* integer representing the ordinal date {@code 20200225}. It would then be compared against {@link ResourceIndexedSearchParamDate#getValueLowDateOrdinal()}
|
||||||
|
@ -392,8 +415,8 @@ public class ModelConfig {
|
||||||
/**
|
/**
|
||||||
* <p>
|
* <p>
|
||||||
* Should searches use the integer field {@code SP_VALUE_LOW_DATE_ORDINAL} and {@code SP_VALUE_HIGH_DATE_ORDINAL} in
|
* Should searches use the integer field {@code SP_VALUE_LOW_DATE_ORDINAL} and {@code SP_VALUE_HIGH_DATE_ORDINAL} in
|
||||||
* {@link ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate} when resolving searches where all predicates are using
|
* {@link ResourceIndexedSearchParamDate} when resolving searches where all predicates are using
|
||||||
* precision of {@link ca.uhn.fhir.model.api.TemporalPrecisionEnum#DAY}.
|
* precision of {@link TemporalPrecisionEnum#DAY}.
|
||||||
* <p>
|
* <p>
|
||||||
* For example, if enabled, the search of {@code Observation?date=2020-02-25} will cause the date to be collapsed down to an
|
* For example, if enabled, the search of {@code Observation?date=2020-02-25} will cause the date to be collapsed down to an
|
||||||
* ordinal {@code 20200225}. It would then be compared against {@link ResourceIndexedSearchParamDate#getValueLowDateOrdinal()}
|
* ordinal {@code 20200225}. It would then be compared against {@link ResourceIndexedSearchParamDate#getValueLowDateOrdinal()}
|
||||||
|
@ -417,6 +440,7 @@ public class ModelConfig {
|
||||||
* <li>Coding.display</li>
|
* <li>Coding.display</li>
|
||||||
* <li>Identifier.use.text</li>
|
* <li>Identifier.use.text</li>
|
||||||
* </ul>
|
* </ul>
|
||||||
|
*
|
||||||
* @since 5.0.0
|
* @since 5.0.0
|
||||||
*/
|
*/
|
||||||
public boolean isSuppressStringIndexingInTokens() {
|
public boolean isSuppressStringIndexingInTokens() {
|
||||||
|
@ -432,12 +456,124 @@ public class ModelConfig {
|
||||||
* <li>Coding.display</li>
|
* <li>Coding.display</li>
|
||||||
* <li>Identifier.use.text</li>
|
* <li>Identifier.use.text</li>
|
||||||
* </ul>
|
* </ul>
|
||||||
|
*
|
||||||
* @since 5.0.0
|
* @since 5.0.0
|
||||||
*/
|
*/
|
||||||
public void setSuppressStringIndexingInTokens(boolean theSuppressStringIndexingInTokens) {
|
public void setSuppressStringIndexingInTokens(boolean theSuppressStringIndexingInTokens) {
|
||||||
mySuppressStringIndexingInTokens = theSuppressStringIndexingInTokens;
|
mySuppressStringIndexingInTokens = theSuppressStringIndexingInTokens;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When indexing a Period (e.g. Encounter.period) where the period has an upper bound
|
||||||
|
* but not a lower bound, a canned "start of time" value can be used as the lower bound
|
||||||
|
* in order to allow range searches to correctly identify all values in the range.
|
||||||
|
* <p>
|
||||||
|
* The default value for this is {@link #DEFAULT_PERIOD_INDEX_START_OF_TIME} which
|
||||||
|
* is probably good enough for almost any application, but this can be changed if
|
||||||
|
* needed.
|
||||||
|
* </p>
|
||||||
|
* <p>
|
||||||
|
* Note the following database documented limitations:
|
||||||
|
* <ul>
|
||||||
|
* <li>JDBC Timestamp Datatype Low Value -4713 and High Value 9999</li>
|
||||||
|
* <li>MySQL 8: the range for DATETIME values is '1000-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999`</li>
|
||||||
|
* <li>Postgresql 12: Timestamp [without time zone] Low Value 4713 BC and High Value 294276 AD</li>
|
||||||
|
* <li>Oracle: Timestamp Low Value 4712 BC and High Value 9999 CE</li>
|
||||||
|
* <li>H2: datetime2 Low Value -4713 and High Value 9999</li>
|
||||||
|
* </ul>
|
||||||
|
* </p>
|
||||||
|
*
|
||||||
|
* @see #getPeriodIndexEndOfTime()
|
||||||
|
* @since 5.1.0
|
||||||
|
*/
|
||||||
|
public IPrimitiveType<Date> getPeriodIndexStartOfTime() {
|
||||||
|
return myPeriodIndexStartOfTime;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When indexing a Period (e.g. Encounter.period) where the period has an upper bound
|
||||||
|
* but not a lower bound, a canned "start of time" value can be used as the lower bound
|
||||||
|
* in order to allow range searches to correctly identify all values in the range.
|
||||||
|
* <p>
|
||||||
|
* The default value for this is {@link #DEFAULT_PERIOD_INDEX_START_OF_TIME} which
|
||||||
|
* is probably good enough for almost any application, but this can be changed if
|
||||||
|
* needed.
|
||||||
|
* </p>
|
||||||
|
* <p>
|
||||||
|
* Note the following database documented limitations:
|
||||||
|
* <ul>
|
||||||
|
* <li>JDBC Timestamp Datatype Low Value -4713 and High Value 9999</li>
|
||||||
|
* <li>MySQL 8: the range for DATETIME values is '1000-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999`</li>
|
||||||
|
* <li>Postgresql 12: Timestamp [without time zone] Low Value 4713 BC and High Value 294276 AD</li>
|
||||||
|
* <li>Oracle: Timestamp Low Value 4712 BC and High Value 9999 CE</li>
|
||||||
|
* <li>H2: datetime2 Low Value -4713 and High Value 9999</li>
|
||||||
|
* </ul>
|
||||||
|
* </p>
|
||||||
|
*
|
||||||
|
* @see #getPeriodIndexEndOfTime()
|
||||||
|
* @since 5.1.0
|
||||||
|
*/
|
||||||
|
public void setPeriodIndexStartOfTime(IPrimitiveType<Date> thePeriodIndexStartOfTime) {
|
||||||
|
Validate.notNull(thePeriodIndexStartOfTime, "thePeriodIndexStartOfTime must not be null");
|
||||||
|
myPeriodIndexStartOfTime = thePeriodIndexStartOfTime;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When indexing a Period (e.g. Encounter.period) where the period has a lower bound
|
||||||
|
* but not an upper bound, a canned "end of time" value can be used as the upper bound
|
||||||
|
* in order to allow range searches to correctly identify all values in the range.
|
||||||
|
* <p>
|
||||||
|
* The default value for this is {@link #DEFAULT_PERIOD_INDEX_END_OF_TIME} which
|
||||||
|
* is probably good enough for almost any application, but this can be changed if
|
||||||
|
* needed.
|
||||||
|
* </p>
|
||||||
|
* <p>
|
||||||
|
* Note the following database documented limitations:
|
||||||
|
* <ul>
|
||||||
|
* <li>JDBC Timestamp Datatype Low Value -4713 and High Value 9999</li>
|
||||||
|
* <li>MySQL 8: the range for DATETIME values is '1000-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999'</li>
|
||||||
|
* <li>Postgresql 12: Timestamp [without time zone] Low Value 4713 BC and High Value 294276 AD</li>
|
||||||
|
* <li>Oracle: Timestamp Low Value 4712 BC and High Value 9999 CE</li>
|
||||||
|
* <li>H2: datetime2 Low Value -4713 and High Value 9999</li>
|
||||||
|
* </ul>
|
||||||
|
* </p>
|
||||||
|
*
|
||||||
|
* @see #getPeriodIndexStartOfTime()
|
||||||
|
* @since 5.1.0
|
||||||
|
*/
|
||||||
|
public IPrimitiveType<Date> getPeriodIndexEndOfTime() {
|
||||||
|
return myPeriodIndexEndOfTime;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* When indexing a Period (e.g. Encounter.period) where the period has a lower bound
|
||||||
|
* but not an upper bound, a canned "end of time" value can be used as the upper bound
|
||||||
|
* in order to allow range searches to correctly identify all values in the range.
|
||||||
|
* <p>
|
||||||
|
* The default value for this is {@link #DEFAULT_PERIOD_INDEX_END_OF_TIME} which
|
||||||
|
* is probably good enough for almost any application, but this can be changed if
|
||||||
|
* needed.
|
||||||
|
* </p>
|
||||||
|
* <p>
|
||||||
|
* Note the following database documented limitations:
|
||||||
|
* <ul>
|
||||||
|
* <li>JDBC Timestamp Datatype Low Value -4713 and High Value 9999</li>
|
||||||
|
* <li>MySQL 8: the range for DATETIME values is '1000-01-01 00:00:00.000000' to '9999-12-31 23:59:59.999999'</li>
|
||||||
|
* <li>Postgresql 12: Timestamp [without time zone] Low Value 4713 BC and High Value 294276 AD</li>
|
||||||
|
* <li>Oracle: Timestamp Low Value 4712 BC and High Value 9999 CE</li>
|
||||||
|
* <li>H2: datetime2 Low Value -4713 and High Value 9999</li>
|
||||||
|
* </ul>
|
||||||
|
* </p>
|
||||||
|
*
|
||||||
|
* @see #getPeriodIndexStartOfTime()
|
||||||
|
* @since 5.1.0
|
||||||
|
*/
|
||||||
|
public void setPeriodIndexEndOfTime(IPrimitiveType<Date> thePeriodIndexEndOfTime) {
|
||||||
|
Validate.notNull(thePeriodIndexEndOfTime, "thePeriodIndexEndOfTime must not be null");
|
||||||
|
myPeriodIndexEndOfTime = thePeriodIndexEndOfTime;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
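A minimal sketch of overriding the canned Period bounds described above, assuming the two setters live on the JPA ModelConfig bean (the class/package name, the standalone main method, and the chosen dates are assumptions for illustration, not part of this change):

import ca.uhn.fhir.jpa.model.entity.ModelConfig; // package assumed
import org.hl7.fhir.r4.model.DateTimeType;

public class PeriodIndexBoundsSketch {
	public static void main(String[] args) {
		ModelConfig modelConfig = new ModelConfig();
		// Keep the indexed Period bounds inside MySQL 8's documented DATETIME range
		modelConfig.setPeriodIndexStartOfTime(new DateTimeType("1000-01-01T00:00:00Z"));
		modelConfig.setPeriodIndexEndOfTime(new DateTimeType("9999-12-31T23:59:59Z"));
	}
}

The defaults should suit most deployments; narrowing them only matters when the backing database rejects the default "start/end of time" timestamps.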
||||||
private static void validateTreatBaseUrlsAsLocal(String theUrl) {
|
private static void validateTreatBaseUrlsAsLocal(String theUrl) {
|
||||||
Validate.notBlank(theUrl, "Base URL must not be null or empty");
|
Validate.notBlank(theUrl, "Base URL must not be null or empty");
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,31 @@
|
||||||
|
package ca.uhn.fhir.jpa.model.entity;
|
||||||
|
|
||||||
|
import org.hibernate.search.annotations.Field;
|
||||||
|
import org.hibernate.search.annotations.IndexedEmbedded;
|
||||||
|
|
||||||
|
import javax.persistence.*;
|
||||||
|
import java.util.Set;
|
||||||
|
|
||||||
|
@Embeddable
|
||||||
|
public class ObservationIndexedCategoryCodeableConceptEntity {
|
||||||
|
|
||||||
|
@Field(name = "text")
|
||||||
|
private String myCodeableConceptText;
|
||||||
|
|
||||||
|
@IndexedEmbedded(depth=2, prefix = "coding")
|
||||||
|
@OneToMany(mappedBy = "myCodeableConceptId", cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
|
||||||
|
private Set<ObservationIndexedCategoryCodingEntity> myObservationIndexedCategoryCodingEntitySet;
|
||||||
|
|
||||||
|
public ObservationIndexedCategoryCodeableConceptEntity(String theCodeableConceptText) {
|
||||||
|
setCodeableConceptText(theCodeableConceptText);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setObservationIndexedCategoryCodingEntitySet(Set<ObservationIndexedCategoryCodingEntity> theObservationIndexedCategoryCodingEntitySet) {
|
||||||
|
myObservationIndexedCategoryCodingEntitySet = theObservationIndexedCategoryCodingEntitySet;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCodeableConceptText(String theCodeableConceptText) {
|
||||||
|
myCodeableConceptText = theCodeableConceptText;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,28 @@
|
||||||
|
package ca.uhn.fhir.jpa.model.entity;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
|
||||||
|
import org.hibernate.search.annotations.Analyze;
|
||||||
|
import org.hibernate.search.annotations.Field;
|
||||||
|
|
||||||
|
import javax.persistence.*;
|
||||||
|
|
||||||
|
@Embeddable
|
||||||
|
public class ObservationIndexedCategoryCodingEntity {
|
||||||
|
|
||||||
|
@Field (name = "code", analyze = Analyze.NO)
|
||||||
|
private String myCode;
|
||||||
|
@Field (name = "system", analyze = Analyze.NO)
|
||||||
|
private String mySystem;
|
||||||
|
@Field (name = "code_system_hash", analyze = Analyze.NO)
|
||||||
|
private String myCodeSystemHash;
|
||||||
|
@Field (name = "display")
|
||||||
|
private String myDisplay;
|
||||||
|
|
||||||
|
public ObservationIndexedCategoryCodingEntity(String theSystem, String theCode, String theDisplay) {
|
||||||
|
myCode = theCode;
|
||||||
|
mySystem = theSystem;
|
||||||
|
myCodeSystemHash = String.valueOf(CodeSystemHash.hashCodeSystem(theSystem, theCode));
|
||||||
|
myDisplay = theDisplay;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,61 @@
|
||||||
|
package ca.uhn.fhir.jpa.model.entity;
|
||||||
|
|
||||||
|
import org.hibernate.search.annotations.DocumentId;
|
||||||
|
import org.hibernate.search.annotations.Field;
|
||||||
|
import org.hibernate.search.annotations.Indexed;
|
||||||
|
import org.hibernate.search.annotations.IndexedEmbedded;
|
||||||
|
|
||||||
|
import javax.persistence.*;
|
||||||
|
|
||||||
|
@Entity
|
||||||
|
@Indexed(index = "code_index")
|
||||||
|
@Embeddable
|
||||||
|
@Table(name = "HFJ_SPIDX_LASTN_CODE_CONCEPT")
|
||||||
|
public class ObservationIndexedCodeCodeableConceptEntity {
|
||||||
|
|
||||||
|
public static final int MAX_LENGTH = 200;
|
||||||
|
|
||||||
|
@Id
|
||||||
|
@DocumentId(name = "codeable_concept_id")
|
||||||
|
@Column(name="CODEABLE_CONCEPT_ID", length = MAX_LENGTH)
|
||||||
|
private String myCodeableConceptId;
|
||||||
|
|
||||||
|
@Field(name = "text")
|
||||||
|
@Column(name = "CODEABLE_CONCEPT_TEXT", nullable = true, length = MAX_LENGTH)
|
||||||
|
private String myCodeableConceptText;
|
||||||
|
|
||||||
|
@IndexedEmbedded(depth=2, prefix = "coding")
|
||||||
|
@JoinColumn(name = "CODEABLE_CONCEPT_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_CONCEPT_CODE"))
|
||||||
|
@OneToOne(cascade = CascadeType.ALL, fetch = FetchType.LAZY, orphanRemoval = true)
|
||||||
|
private ObservationIndexedCodeCodingEntity myObservationIndexedCodeCodingEntity;
|
||||||
|
|
||||||
|
public ObservationIndexedCodeCodeableConceptEntity() {
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
public ObservationIndexedCodeCodeableConceptEntity(String theCodeableConceptText, String theCodeableConceptId) {
|
||||||
|
setCodeableConceptText(theCodeableConceptText);
|
||||||
|
setCodeableConceptId(theCodeableConceptId);
|
||||||
|
}
|
||||||
|
|
||||||
|
public void addCoding(ObservationIndexedCodeCodingEntity theObservationIndexedCodeCodingEntity) {
|
||||||
|
myObservationIndexedCodeCodingEntity = theObservationIndexedCodeCodingEntity;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCodeableConceptId() {
|
||||||
|
return myCodeableConceptId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCodeableConceptId(String theCodeableConceptId) {
|
||||||
|
myCodeableConceptId = theCodeableConceptId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCodeableConceptText() {
|
||||||
|
return myCodeableConceptText;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCodeableConceptText(String theCodeableConceptText) {
|
||||||
|
myCodeableConceptText = theCodeableConceptText;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,43 @@
|
||||||
|
package ca.uhn.fhir.jpa.model.entity;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.jpa.model.util.CodeSystemHash;
|
||||||
|
import org.hibernate.search.annotations.Analyze;
|
||||||
|
import org.hibernate.search.annotations.Field;
|
||||||
|
|
||||||
|
import javax.persistence.*;
|
||||||
|
|
||||||
|
@Entity
|
||||||
|
@Embeddable
|
||||||
|
@Table(name = "HFJ_SPIDX_LASTN_CODING")
|
||||||
|
public class ObservationIndexedCodeCodingEntity {
|
||||||
|
|
||||||
|
public static final int MAX_LENGTH = 200;
|
||||||
|
|
||||||
|
@Id
|
||||||
|
@Column(name = "CODEABLE_CONCEPT_ID", length = MAX_LENGTH)
|
||||||
|
private String myCodeableConceptId;
|
||||||
|
|
||||||
|
@Field(name = "code", analyze = Analyze.NO)
|
||||||
|
private String myCode;
|
||||||
|
|
||||||
|
@Field(name = "system", analyze = Analyze.NO)
|
||||||
|
private String mySystem;
|
||||||
|
|
||||||
|
@Field(name = "code_system_hash", analyze = Analyze.NO)
|
||||||
|
private String myCodeSystemHash;
|
||||||
|
|
||||||
|
@Field(name = "display")
|
||||||
|
private String myDisplay;
|
||||||
|
|
||||||
|
public ObservationIndexedCodeCodingEntity() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public ObservationIndexedCodeCodingEntity(String theSystem, String theCode, String theDisplay, String theCodeableConceptId) {
|
||||||
|
myCode = theCode;
|
||||||
|
mySystem = theSystem;
|
||||||
|
myCodeSystemHash = String.valueOf(CodeSystemHash.hashCodeSystem(theSystem, theCode));
|
||||||
|
myDisplay = theDisplay;
|
||||||
|
myCodeableConceptId = theCodeableConceptId;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
|
@ -0,0 +1,94 @@
|
||||||
|
package ca.uhn.fhir.jpa.model.entity;
|
||||||
|
|
||||||
|
import org.hibernate.search.annotations.*;
|
||||||
|
|
||||||
|
import javax.persistence.*;
|
||||||
|
import javax.persistence.Index;
|
||||||
|
import java.util.*;
|
||||||
|
|
||||||
|
@Entity
|
||||||
|
@Table(name = "HFJ_LASTN_OBSERVATION", indexes = {
|
||||||
|
@Index(name = "IDX_LASTN_OBSERVATION_RESID", columnList = "RESOURCE_IDENTIFIER", unique = true)
|
||||||
|
})
|
||||||
|
@Indexed(index = "observation_index")
|
||||||
|
public class ObservationIndexedSearchParamLastNEntity {
|
||||||
|
|
||||||
|
public static final int MAX_LENGTH = 200;
|
||||||
|
|
||||||
|
@Id
|
||||||
|
@SequenceGenerator(name = "SEQ_LASTN", sequenceName = "SEQ_LASTN")
|
||||||
|
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_LASTN")
|
||||||
|
@Column(name = "LASTN_ID")
|
||||||
|
private Long myId;
|
||||||
|
|
||||||
|
@Field(name = "subject", analyze = Analyze.NO)
|
||||||
|
@Column(name = "LASTN_SUBJECT_ID", nullable = true, length = MAX_LENGTH)
|
||||||
|
private String mySubject;
|
||||||
|
|
||||||
|
@ManyToOne(fetch = FetchType.LAZY)
|
||||||
|
@JoinColumn(name = "CODEABLE_CONCEPT_ID", nullable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_OBSERVATION_CODE_FK"))
|
||||||
|
@IndexedEmbedded(depth = 2, prefix = "codeconcept")
|
||||||
|
private ObservationIndexedCodeCodeableConceptEntity myObservationCode;
|
||||||
|
|
||||||
|
@Field(name = "codeconceptid", analyze = Analyze.NO)
|
||||||
|
@Column(name = "CODEABLE_CONCEPT_ID", nullable = false, updatable = false, insertable = false, length = MAX_LENGTH)
|
||||||
|
private String myCodeNormalizedId;
|
||||||
|
|
||||||
|
@IndexedEmbedded(depth = 2, prefix = "categoryconcept")
|
||||||
|
@Transient
|
||||||
|
private Set<ObservationIndexedCategoryCodeableConceptEntity> myCategoryCodeableConcepts;
|
||||||
|
|
||||||
|
@Field(name = "effectivedtm", analyze = Analyze.NO)
|
||||||
|
@Temporal(TemporalType.TIMESTAMP)
|
||||||
|
@Column(name = "LASTN_EFFECTIVE_DATETIME", nullable = true)
|
||||||
|
private Date myEffectiveDtm;
|
||||||
|
|
||||||
|
@DocumentId(name = "identifier")
|
||||||
|
@Column(name = "RESOURCE_IDENTIFIER", nullable = false, length = MAX_LENGTH)
|
||||||
|
private String myIdentifier;
|
||||||
|
|
||||||
|
public ObservationIndexedSearchParamLastNEntity() {
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getSubject() {
|
||||||
|
return mySubject;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setSubject(String theSubject) {
|
||||||
|
mySubject = theSubject;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getIdentifier() {
|
||||||
|
return myIdentifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setIdentifier(String theIdentifier) {
|
||||||
|
myIdentifier = theIdentifier;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setEffectiveDtm(Date theEffectiveDtm) {
|
||||||
|
myEffectiveDtm = theEffectiveDtm;
|
||||||
|
}
|
||||||
|
|
||||||
|
public Date getEffectiveDtm() {
|
||||||
|
return myEffectiveDtm;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCodeNormalizedId(String theCodeNormalizedId) {
|
||||||
|
myCodeNormalizedId = theCodeNormalizedId;
|
||||||
|
}
|
||||||
|
|
||||||
|
public String getCodeNormalizedId() {
|
||||||
|
return myCodeNormalizedId;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
public void setObservationCode(ObservationIndexedCodeCodeableConceptEntity theObservationCode) {
|
||||||
|
myObservationCode = theObservationCode;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setCategoryCodeableConcepts(Set<ObservationIndexedCategoryCodeableConceptEntity> theCategoryCodeableConcepts) {
|
||||||
|
myCategoryCodeableConcepts = theCategoryCodeableConcepts;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
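A hedged sketch of how an indexing service could populate the lastn entity above using only the setters and constructors introduced in this change; the identifier, subject, and normalized code ID values are placeholders:

import ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity;
import ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity;

import java.util.Date;

public class LastNEntitySketch {
	public static void main(String[] args) {
		ObservationIndexedSearchParamLastNEntity entity = new ObservationIndexedSearchParamLastNEntity();
		entity.setIdentifier("Observation/example-1");  // placeholder resource identifier
		entity.setSubject("Patient/123");               // placeholder subject reference
		entity.setEffectiveDtm(new Date());
		entity.setCodeNormalizedId("loinc-8867-4");     // placeholder normalized code id
		entity.setObservationCode(new ObservationIndexedCodeCodeableConceptEntity("Heart rate", "loinc-8867-4"));
	}
}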
|
@ -59,7 +59,7 @@ import static org.apache.commons.lang3.StringUtils.trim;
|
||||||
@Index(name = "IDX_SP_TOKEN_HASH_S", columnList = "HASH_SYS"),
|
@Index(name = "IDX_SP_TOKEN_HASH_S", columnList = "HASH_SYS"),
|
||||||
@Index(name = "IDX_SP_TOKEN_HASH_SV", columnList = "HASH_SYS_AND_VALUE"),
|
@Index(name = "IDX_SP_TOKEN_HASH_SV", columnList = "HASH_SYS_AND_VALUE"),
|
||||||
// TODO PERF change this to:
|
// TODO PERF change this to:
|
||||||
// @Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE,RES_ID"),
|
// @Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE,RES_ID"),
|
||||||
@Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE"),
|
@Index(name = "IDX_SP_TOKEN_HASH_V", columnList = "HASH_VALUE"),
|
||||||
|
|
||||||
@Index(name = "IDX_SP_TOKEN_UPDATED", columnList = "SP_UPDATED"),
|
@Index(name = "IDX_SP_TOKEN_UPDATED", columnList = "SP_UPDATED"),
|
||||||
|
@ -232,6 +232,7 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa
|
||||||
b.append(getHashValue());
|
b.append(getHashValue());
|
||||||
b.append(getHashSystem());
|
b.append(getHashSystem());
|
||||||
b.append(getHashSystemAndValue());
|
b.append(getHashSystemAndValue());
|
||||||
|
|
||||||
return b.toHashCode();
|
return b.toHashCode();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -37,6 +37,7 @@ public class SearchRuntimeDetails {
|
||||||
private boolean myLoadSynchronous;
|
private boolean myLoadSynchronous;
|
||||||
private String myQueryString;
|
private String myQueryString;
|
||||||
private SearchStatusEnum mySearchStatus;
|
private SearchStatusEnum mySearchStatus;
|
||||||
|
private int myFoundIndexMatchesCount;
|
||||||
public SearchRuntimeDetails(RequestDetails theRequestDetails, String theSearchUuid) {
|
public SearchRuntimeDetails(RequestDetails theRequestDetails, String theSearchUuid) {
|
||||||
myRequestDetails = theRequestDetails;
|
myRequestDetails = theRequestDetails;
|
||||||
mySearchUuid = theSearchUuid;
|
mySearchUuid = theSearchUuid;
|
||||||
|
@ -67,6 +68,14 @@ public class SearchRuntimeDetails {
|
||||||
myFoundMatchesCount = theFoundMatchesCount;
|
myFoundMatchesCount = theFoundMatchesCount;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public int getFoundIndexMatchesCount() {
|
||||||
|
return myFoundIndexMatchesCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setFoundIndexMatchesCount(int theFoundIndexMatchesCount) {
|
||||||
|
myFoundIndexMatchesCount = theFoundIndexMatchesCount;
|
||||||
|
}
|
||||||
|
|
||||||
public boolean getLoadSynchronous() {
|
public boolean getLoadSynchronous() {
|
||||||
return myLoadSynchronous;
|
return myLoadSynchronous;
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,33 @@
|
||||||
|
package ca.uhn.fhir.jpa.model.util;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.util.UrlUtil;
|
||||||
|
import com.google.common.base.Charsets;
|
||||||
|
import com.google.common.hash.HashCode;
|
||||||
|
import com.google.common.hash.HashFunction;
|
||||||
|
import com.google.common.hash.Hasher;
|
||||||
|
import com.google.common.hash.Hashing;
|
||||||
|
|
||||||
|
public class CodeSystemHash {
|
||||||
|
private static final HashFunction HASH_FUNCTION = Hashing.murmur3_128(0);
|
||||||
|
private static final byte[] DELIMITER_BYTES = "|".getBytes(Charsets.UTF_8);
|
||||||
|
|
||||||
|
public static long hashCodeSystem(String system, String code) {
|
||||||
|
Hasher hasher = HASH_FUNCTION.newHasher();
|
||||||
|
addStringToHasher(hasher, system);
|
||||||
|
addStringToHasher(hasher, code);
|
||||||
|
|
||||||
|
HashCode hashCode = hasher.hash();
|
||||||
|
return hashCode.asLong();
|
||||||
|
}
|
||||||
|
|
||||||
|
private static void addStringToHasher(Hasher hasher, String next) {
|
||||||
|
if (next == null) {
|
||||||
|
hasher.putByte((byte) 0);
|
||||||
|
} else {
|
||||||
|
next = UrlUtil.escapeUrlParam(next);
|
||||||
|
byte[] bytes = next.getBytes(Charsets.UTF_8);
|
||||||
|
hasher.putBytes(bytes);
|
||||||
|
}
|
||||||
|
hasher.putBytes(DELIMITER_BYTES);
|
||||||
|
}
|
||||||
|
}
|
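A short usage sketch for the hashing helper above; the LOINC system/code pair is illustrative. The hash is deterministic, and the lastn entities in this change store it as a string in their "code_system_hash" index fields:

import ca.uhn.fhir.jpa.model.util.CodeSystemHash;

public class CodeSystemHashSketch {
	public static void main(String[] args) {
		// Same system/code pair always yields the same long
		long hash = CodeSystemHash.hashCodeSystem("http://loinc.org", "8867-4");
		String indexValue = String.valueOf(hash);
		System.out.println(indexValue);
	}
}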
|
@ -161,6 +161,11 @@ public class JpaConstants {
|
||||||
* Operation name for the "$export-poll-status" operation
|
* Operation name for the "$export-poll-status" operation
|
||||||
*/
|
*/
|
||||||
public static final String OPERATION_EXPORT_POLL_STATUS = "$export-poll-status";
|
public static final String OPERATION_EXPORT_POLL_STATUS = "$export-poll-status";
|
||||||
|
/**
|
||||||
|
* Operation name for the "$lastn" operation
|
||||||
|
*/
|
||||||
|
public static final String OPERATION_LASTN = "$lastn";
|
||||||
|
|
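A hedged client-side sketch of calling the operation named above. The endpoint URL is a placeholder, the JpaConstants import path is assumed, and a real $lastn request would also pass subject/patient and code/category criteria alongside the standard "max" parameter:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.model.util.JpaConstants; // import path assumed
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.IntegerType;
import org.hl7.fhir.r4.model.Observation;
import org.hl7.fhir.r4.model.Parameters;

public class LastNClientSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir"); // placeholder URL
		Bundle result = client
			.operation()
			.onType(Observation.class)
			.named(JpaConstants.OPERATION_LASTN)
			.withParameter(Parameters.class, "max", new IntegerType(3)) // at most 3 per code
			.returnResourceType(Bundle.class)
			.useHttpGet()
			.execute();
		System.out.println(result.getEntry().size());
	}
}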
||||||
/**
|
/**
|
||||||
* <p>
|
* <p>
|
||||||
* This extension should be of type <code>string</code> and should be
|
* This extension should be of type <code>string</code> and should be
|
||||||
|
|
|
@ -61,6 +61,8 @@ public class SearchParameterMap implements Serializable {
|
||||||
private SummaryEnum mySummaryMode;
|
private SummaryEnum mySummaryMode;
|
||||||
private SearchTotalModeEnum mySearchTotalMode;
|
private SearchTotalModeEnum mySearchTotalMode;
|
||||||
private QuantityParam myNearDistanceParam;
|
private QuantityParam myNearDistanceParam;
|
||||||
|
private boolean myLastN;
|
||||||
|
private Integer myLastNMax;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Constructor
|
* Constructor
|
||||||
|
@ -303,6 +305,42 @@ public class SearchParameterMap implements Serializable {
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If set, tells the server to use an Elasticsearch query to generate a list of
|
||||||
|
* Resource IDs for the LastN operation
|
||||||
|
*/
|
||||||
|
public boolean isLastN() {
|
||||||
|
return myLastN;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If set, tells the server to use an Elasticsearch query to generate a list of
|
||||||
|
* Resource IDs for the LastN operation
|
||||||
|
*/
|
||||||
|
public SearchParameterMap setLastN(boolean theLastN) {
|
||||||
|
myLastN = theLastN;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If set, tells the server the maximum number of observations to return for each
|
||||||
|
* observation code in the result set of a lastn operation
|
||||||
|
*/
|
||||||
|
public Integer getLastNMax() {
|
||||||
|
return myLastNMax;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* If set, tells the server the maximum number of observations to return for each
|
||||||
|
* observation code in the result set of a lastn operation
|
||||||
|
*/
|
||||||
|
public SearchParameterMap setLastNMax(Integer theLastNMax) {
|
||||||
|
myLastNMax = theLastNMax;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
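A brief sketch of how a caller might flag a map for this lastn path; the "patient" criterion and its value are illustrative, not required by the API:

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.ReferenceParam;

public class LastNSearchMapSketch {
	public static void main(String[] args) {
		SearchParameterMap map = new SearchParameterMap();
		map.setLastN(true);   // route resource ID generation through the external index
		map.setLastNMax(3);   // at most 3 Observations per observation code
		map.add("patient", new ReferenceParam("Patient/123")); // illustrative criterion
		System.out.println(map.isLastN() + " max=" + map.getLastNMax());
	}
}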
|
|
||||||
/**
|
/**
|
||||||
* This method creates a URL query string representation of the parameters in this
|
* This method creates a URL query string representation of the parameters in this
|
||||||
* object, excluding the part before the parameters, e.g.
|
* object, excluding the part before the parameters, e.g.
|
||||||
|
|
|
@ -162,17 +162,25 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
}
|
}
|
||||||
|
|
||||||
private IExtractor<PathAndRef> createReferenceExtractor() {
|
private IExtractor<PathAndRef> createReferenceExtractor() {
|
||||||
return (params, searchParam, value, path) -> {
|
return new ResourceLinkExtractor();
|
||||||
if (value instanceof IBaseResource) {
|
}
|
||||||
|
|
||||||
|
private class ResourceLinkExtractor implements IExtractor<PathAndRef> {
|
||||||
|
|
||||||
|
private PathAndRef myPathAndRef = null;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void extract(SearchParamSet<PathAndRef> theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath) {
|
||||||
|
if (theValue instanceof IBaseResource) {
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
String nextType = toRootTypeName(value);
|
String nextType = toRootTypeName(theValue);
|
||||||
switch (nextType) {
|
switch (nextType) {
|
||||||
case "uri":
|
case "uri":
|
||||||
case "canonical":
|
case "canonical":
|
||||||
String typeName = toTypeName(value);
|
String typeName = toTypeName(theValue);
|
||||||
IPrimitiveType<?> valuePrimitive = (IPrimitiveType<?>) value;
|
IPrimitiveType<?> valuePrimitive = (IPrimitiveType<?>) theValue;
|
||||||
IBaseReference fakeReference = (IBaseReference) myContext.getElementDefinition("Reference").newInstance();
|
IBaseReference fakeReference = (IBaseReference) myContext.getElementDefinition("Reference").newInstance();
|
||||||
fakeReference.setReference(valuePrimitive.getValueAsString());
|
fakeReference.setReference(valuePrimitive.getValueAsString());
|
||||||
|
|
||||||
|
@ -188,23 +196,23 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
*/
|
*/
|
||||||
IIdType parsed = fakeReference.getReferenceElement();
|
IIdType parsed = fakeReference.getReferenceElement();
|
||||||
if (parsed.hasIdPart() && parsed.hasResourceType() && !parsed.isAbsolute()) {
|
if (parsed.hasIdPart() && parsed.hasResourceType() && !parsed.isAbsolute()) {
|
||||||
PathAndRef ref = new PathAndRef(searchParam.getName(), path, fakeReference, false);
|
myPathAndRef = new PathAndRef(theSearchParam.getName(), thePath, fakeReference, false);
|
||||||
params.add(ref);
|
theParams.add(myPathAndRef);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (parsed.isAbsolute()) {
|
if (parsed.isAbsolute()) {
|
||||||
PathAndRef ref = new PathAndRef(searchParam.getName(), path, fakeReference, true);
|
myPathAndRef = new PathAndRef(theSearchParam.getName(), thePath, fakeReference, true);
|
||||||
params.add(ref);
|
theParams.add(myPathAndRef);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
params.addWarning("Ignoring canonical reference (indexing canonical is not yet supported)");
|
theParams.addWarning("Ignoring canonical reference (indexing canonical is not yet supported)");
|
||||||
break;
|
break;
|
||||||
case "reference":
|
case "reference":
|
||||||
case "Reference":
|
case "Reference":
|
||||||
IBaseReference valueRef = (IBaseReference) value;
|
IBaseReference valueRef = (IBaseReference) theValue;
|
||||||
|
|
||||||
IIdType nextId = valueRef.getReferenceElement();
|
IIdType nextId = valueRef.getReferenceElement();
|
||||||
if (nextId.isEmpty() && valueRef.getResource() != null) {
|
if (nextId.isEmpty() && valueRef.getResource() != null) {
|
||||||
|
@ -218,14 +226,27 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
PathAndRef ref = new PathAndRef(searchParam.getName(), path, valueRef, false);
|
myPathAndRef = new PathAndRef(theSearchParam.getName(), thePath, valueRef, false);
|
||||||
params.add(ref);
|
theParams.add(myPathAndRef);
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
addUnexpectedDatatypeWarning(params, searchParam, value);
|
addUnexpectedDatatypeWarning(theParams, theSearchParam, theValue);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
};
|
}
|
||||||
|
|
||||||
|
public PathAndRef get(IBase theValue, String thePath) {
|
||||||
|
extract(new SearchParamSet<>(),
|
||||||
|
new RuntimeSearchParam(null, null, "Reference", null, null, null, null, null, null, null),
|
||||||
|
theValue, thePath);
|
||||||
|
return myPathAndRef;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public PathAndRef extractReferenceLinkFromResource(IBase theValue, String thePath) {
|
||||||
|
ResourceLinkExtractor extractor = new ResourceLinkExtractor();
|
||||||
|
return extractor.get(theValue, thePath);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -375,29 +396,126 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
}
|
}
|
||||||
|
|
||||||
private IExtractor<ResourceIndexedSearchParamDate> createDateExtractor(IBaseResource theResource) {
|
private IExtractor<ResourceIndexedSearchParamDate> createDateExtractor(IBaseResource theResource) {
|
||||||
return (params, searchParam, value, path) -> {
|
return new DateExtractor(theResource);
|
||||||
String nextType = toRootTypeName(value);
|
}
|
||||||
String resourceType = toRootTypeName(theResource);
|
|
||||||
|
private class DateExtractor implements IExtractor<ResourceIndexedSearchParamDate> {
|
||||||
|
|
||||||
|
String myResourceType;
|
||||||
|
ResourceIndexedSearchParamDate myIndexedSearchParamDate = null;
|
||||||
|
|
||||||
|
public DateExtractor(IBaseResource theResource) {
|
||||||
|
myResourceType = toRootTypeName(theResource);
|
||||||
|
}
|
||||||
|
|
||||||
|
public DateExtractor(String theResourceType) {
|
||||||
|
myResourceType = theResourceType;
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void extract(SearchParamSet theParams, RuntimeSearchParam theSearchParam, IBase theValue, String thePath) {
|
||||||
|
String nextType = toRootTypeName(theValue);
|
||||||
switch (nextType) {
|
switch (nextType) {
|
||||||
case "date":
|
case "date":
|
||||||
case "dateTime":
|
case "dateTime":
|
||||||
case "instant":
|
case "instant":
|
||||||
addDateTimeTypes(resourceType, params, searchParam, value);
|
addDateTimeTypes(myResourceType, theParams, theSearchParam, theValue);
|
||||||
break;
|
break;
|
||||||
case "Period":
|
case "Period":
|
||||||
addDate_Period(resourceType, params, searchParam, value);
|
addDate_Period(myResourceType, theParams, theSearchParam, theValue);
|
||||||
break;
|
break;
|
||||||
case "Timing":
|
case "Timing":
|
||||||
addDate_Timing(resourceType, params, searchParam, value);
|
addDate_Timing(myResourceType, theParams, theSearchParam, theValue);
|
||||||
break;
|
break;
|
||||||
case "string":
|
case "string":
|
||||||
// CarePlan.activitydate can be a string - ignored for now
|
// CarePlan.activitydate can be a string - ignored for now
|
||||||
break;
|
break;
|
||||||
default:
|
default:
|
||||||
addUnexpectedDatatypeWarning(params, searchParam, value);
|
addUnexpectedDatatypeWarning(theParams, theSearchParam, theValue);
|
||||||
break;
|
break;
|
||||||
|
|
||||||
}
|
}
|
||||||
};
|
}
|
||||||
|
|
||||||
|
private void addDate_Period(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
|
Date start = extractValueAsDate(myPeriodStartValueChild, theValue);
|
||||||
|
String startAsString = extractValueAsString(myPeriodStartValueChild, theValue);
|
||||||
|
Date end = extractValueAsDate(myPeriodEndValueChild, theValue);
|
||||||
|
String endAsString = extractValueAsString(myPeriodEndValueChild, theValue);
|
||||||
|
|
||||||
|
if (start != null || end != null) {
|
||||||
|
|
||||||
|
if (start == null) {
|
||||||
|
start = myModelConfig.getPeriodIndexStartOfTime().getValue();
|
||||||
|
startAsString = myModelConfig.getPeriodIndexStartOfTime().getValueAsString();
|
||||||
|
}
|
||||||
|
if (end == null) {
|
||||||
|
end = myModelConfig.getPeriodIndexEndOfTime().getValue();
|
||||||
|
endAsString = myModelConfig.getPeriodIndexEndOfTime().getValueAsString();
|
||||||
|
}
|
||||||
|
|
||||||
|
myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString);
|
||||||
|
theParams.add(myIndexedSearchParamDate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private void addDate_Timing(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
|
List<IPrimitiveType<Date>> values = extractValuesAsFhirDates(myTimingEventValueChild, theValue);
|
||||||
|
|
||||||
|
TreeSet<Date> dates = new TreeSet<>();
|
||||||
|
String firstValue = null;
|
||||||
|
String finalValue = null;
|
||||||
|
for (IPrimitiveType<Date> nextEvent : values) {
|
||||||
|
if (nextEvent.getValue() != null) {
|
||||||
|
dates.add(nextEvent.getValue());
|
||||||
|
if (firstValue == null) {
|
||||||
|
firstValue = nextEvent.getValueAsString();
|
||||||
|
}
|
||||||
|
finalValue = nextEvent.getValueAsString();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Optional<IBase> repeat = myTimingRepeatValueChild.getAccessor().getFirstValueOrNull(theValue);
|
||||||
|
if (repeat.isPresent()) {
|
||||||
|
Optional<IBase> bounds = myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get());
|
||||||
|
if (bounds.isPresent()) {
|
||||||
|
String boundsType = toRootTypeName(bounds.get());
|
||||||
|
if ("Period".equals(boundsType)) {
|
||||||
|
Date start = extractValueAsDate(myPeriodStartValueChild, bounds.get());
|
||||||
|
Date end = extractValueAsDate(myPeriodEndValueChild, bounds.get());
|
||||||
|
dates.add(start);
|
||||||
|
dates.add(end);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!dates.isEmpty()) {
|
||||||
|
myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), dates.first(), firstValue, dates.last(), finalValue, firstValue);
|
||||||
|
theParams.add(myIndexedSearchParamDate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@SuppressWarnings("unchecked")
|
||||||
|
private void addDateTimeTypes(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
|
IPrimitiveType<Date> nextBaseDateTime = (IPrimitiveType<Date>) theValue;
|
||||||
|
if (nextBaseDateTime.getValue() != null) {
|
||||||
|
myIndexedSearchParamDate = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValueAsString());
|
||||||
|
theParams.add(myIndexedSearchParamDate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public ResourceIndexedSearchParamDate get(IBase theValue, String thePath) {
|
||||||
|
extract(new SearchParamSet<>(),
|
||||||
|
new RuntimeSearchParam(null, null, "date", null, null, null, null, null, null, null),
|
||||||
|
theValue, thePath);
|
||||||
|
return myIndexedSearchParamDate;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public Date extractDateFromResource(IBase theValue, String thePath) {
|
||||||
|
DateExtractor extractor = new DateExtractor("DateType");
|
||||||
|
return extractor.get(theValue, thePath).getValueHigh();
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -508,7 +626,8 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
/**
|
/**
|
||||||
* Override parent because we're using FHIRPath here
|
* Override parent because we're using FHIRPath here
|
||||||
*/
|
*/
|
||||||
private List<IBase> extractValues(String thePaths, IBaseResource theResource) {
|
@Override
|
||||||
|
public List<IBase> extractValues(String thePaths, IBaseResource theResource) {
|
||||||
List<IBase> values = new ArrayList<>();
|
List<IBase> values = new ArrayList<>();
|
||||||
if (isNotBlank(thePaths)) {
|
if (isNotBlank(thePaths)) {
|
||||||
String[] nextPathsSplit = split(thePaths);
|
String[] nextPathsSplit = split(thePaths);
|
||||||
|
@ -626,30 +745,73 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
}
|
}
|
||||||
|
|
||||||
private void addToken_CodeableConcept(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
private void addToken_CodeableConcept(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
List<IBase> codings = myCodeableConceptCodingValueChild.getAccessor().getValues(theValue);
|
List<IBase> codings = getCodingsFromCodeableConcept(theValue);
|
||||||
for (IBase nextCoding : codings) {
|
for (IBase nextCoding : codings) {
|
||||||
addToken_Coding(theResourceType, theParams, theSearchParam, nextCoding);
|
addToken_Coding(theResourceType, theParams, theSearchParam, nextCoding);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (shouldIndexTextComponentOfToken(theSearchParam)) {
|
if (shouldIndexTextComponentOfToken(theSearchParam)) {
|
||||||
String text = extractValueAsString(myCodeableConceptTextValueChild, theValue);
|
String text = getDisplayTextFromCodeableConcept(theValue);
|
||||||
if (isNotBlank(text)) {
|
if (isNotBlank(text)) {
|
||||||
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
|
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public List<IBase> getCodingsFromCodeableConcept(IBase theValue) {
|
||||||
|
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
|
||||||
|
if ("CodeableConcept".equals(nextType)) {
|
||||||
|
return myCodeableConceptCodingValueChild.getAccessor().getValues(theValue);
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getDisplayTextFromCodeableConcept(IBase theValue) {
|
||||||
|
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
|
||||||
|
if ("CodeableConcept".equals(nextType)) {
|
||||||
|
return extractValueAsString(myCodeableConceptTextValueChild, theValue);
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private void addToken_Coding(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
private void addToken_Coding(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
String system = extractValueAsString(myCodingSystemValueChild, theValue);
|
ResourceIndexedSearchParamToken resourceIndexedSearchParamToken = createSearchParamForCoding(theResourceType, theSearchParam, theValue);
|
||||||
String code = extractValueAsString(myCodingCodeValueChild, theValue);
|
if (resourceIndexedSearchParamToken != null) {
|
||||||
createTokenIndexIfNotBlank(theResourceType, theParams, theSearchParam, system, code);
|
theParams.add(resourceIndexedSearchParamToken);
|
||||||
|
}
|
||||||
|
|
||||||
if (shouldIndexTextComponentOfToken(theSearchParam)) {
|
if (shouldIndexTextComponentOfToken(theSearchParam)) {
|
||||||
String text = extractValueAsString(myCodingDisplayValueChild, theValue);
|
String text = getDisplayTextForCoding(theValue);
|
||||||
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
|
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public ResourceIndexedSearchParamToken createSearchParamForCoding(String theResourceType, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
|
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
|
||||||
|
if ("Coding".equals(nextType)) {
|
||||||
|
String system = extractValueAsString(myCodingSystemValueChild, theValue);
|
||||||
|
String code = extractValueAsString(myCodingCodeValueChild, theValue);
|
||||||
|
return createTokenIndexIfNotBlank(theResourceType, theSearchParam, system, code);
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public String getDisplayTextForCoding(IBase theValue) {
|
||||||
|
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
|
||||||
|
if ("Coding".equals(nextType)) {
|
||||||
|
return extractValueAsString(myCodingDisplayValueChild, theValue);
|
||||||
|
} else {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
private void addToken_ContactPoint(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
private void addToken_ContactPoint(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
String system = extractValueAsString(myContactPointSystemValueChild, theValue);
|
String system = extractValueAsString(myContactPointSystemValueChild, theValue);
|
||||||
String value = extractValueAsString(myContactPointValueValueChild, theValue);
|
String value = extractValueAsString(myContactPointValueValueChild, theValue);
|
||||||
|
@ -677,6 +839,16 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
String endAsString = extractValueAsString(myPeriodEndValueChild, theValue);
|
String endAsString = extractValueAsString(myPeriodEndValueChild, theValue);
|
||||||
|
|
||||||
if (start != null || end != null) {
|
if (start != null || end != null) {
|
||||||
|
|
||||||
|
if (start == null) {
|
||||||
|
start = myModelConfig.getPeriodIndexStartOfTime().getValue();
|
||||||
|
startAsString = myModelConfig.getPeriodIndexStartOfTime().getValueAsString();
|
||||||
|
}
|
||||||
|
if (end == null) {
|
||||||
|
end = myModelConfig.getPeriodIndexEndOfTime().getValue();
|
||||||
|
endAsString = myModelConfig.getPeriodIndexEndOfTime().getValueAsString();
|
||||||
|
}
|
||||||
|
|
||||||
ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString);
|
ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString);
|
||||||
theParams.add(nextEntity);
|
theParams.add(nextEntity);
|
||||||
}
|
}
|
||||||
|
@ -895,26 +1067,19 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private String toRootTypeName(IBase nextObject) {
|
@Override
|
||||||
|
public String toRootTypeName(IBase nextObject) {
|
||||||
BaseRuntimeElementDefinition<?> elementDefinition = getContext().getElementDefinition(nextObject.getClass());
|
BaseRuntimeElementDefinition<?> elementDefinition = getContext().getElementDefinition(nextObject.getClass());
|
||||||
BaseRuntimeElementDefinition<?> rootParentDefinition = elementDefinition.getRootParentDefinition();
|
BaseRuntimeElementDefinition<?> rootParentDefinition = elementDefinition.getRootParentDefinition();
|
||||||
return rootParentDefinition.getName();
|
return rootParentDefinition.getName();
|
||||||
}
|
}
|
||||||
|
|
||||||
private String toTypeName(IBase nextObject) {
|
@Override
|
||||||
|
public String toTypeName(IBase nextObject) {
|
||||||
BaseRuntimeElementDefinition<?> elementDefinition = getContext().getElementDefinition(nextObject.getClass());
|
BaseRuntimeElementDefinition<?> elementDefinition = getContext().getElementDefinition(nextObject.getClass());
|
||||||
return elementDefinition.getName();
|
return elementDefinition.getName();
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("unchecked")
|
|
||||||
private void addDateTimeTypes(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
|
||||||
IPrimitiveType<Date> nextBaseDateTime = (IPrimitiveType<Date>) theValue;
|
|
||||||
if (nextBaseDateTime.getValue() != null) {
|
|
||||||
ResourceIndexedSearchParamDate param = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValue(), nextBaseDateTime.getValueAsString(), nextBaseDateTime.getValueAsString());
|
|
||||||
theParams.add(param);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private void addUri_Uri(String theResourceType, Set<ResourceIndexedSearchParamUri> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
private void addUri_Uri(String theResourceType, Set<ResourceIndexedSearchParamUri> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
|
||||||
IPrimitiveType<?> value = (IPrimitiveType<?>) theValue;
|
IPrimitiveType<?> value = (IPrimitiveType<?>) theValue;
|
||||||
String valueAsString = value.getValueAsString();
|
String valueAsString = value.getValueAsString();
|
||||||
|
@ -946,8 +1111,16 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
}
|
}
|
||||||
|
|
||||||
private void createTokenIndexIfNotBlank(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
|
private void createTokenIndexIfNotBlank(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
|
||||||
|
ResourceIndexedSearchParamToken nextEntity = createTokenIndexIfNotBlank(theResourceType, theSearchParam, theSystem, theValue);
|
||||||
|
if (nextEntity != null) {
|
||||||
|
theParams.add(nextEntity);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private ResourceIndexedSearchParamToken createTokenIndexIfNotBlank(String theResourceType, RuntimeSearchParam theSearchParam, String theSystem, String theValue) {
|
||||||
String system = theSystem;
|
String system = theSystem;
|
||||||
String value = theValue;
|
String value = theValue;
|
||||||
|
ResourceIndexedSearchParamToken nextEntity = null;
|
||||||
if (isNotBlank(system) || isNotBlank(value)) {
|
if (isNotBlank(system) || isNotBlank(value)) {
|
||||||
if (system != null && system.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
|
if (system != null && system.length() > ResourceIndexedSearchParamToken.MAX_LENGTH) {
|
||||||
system = system.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
|
system = system.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
|
||||||
|
@ -956,10 +1129,10 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
|
||||||
value = value.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
|
value = value.substring(0, ResourceIndexedSearchParamToken.MAX_LENGTH);
|
||||||
}
|
}
|
||||||
|
|
||||||
ResourceIndexedSearchParamToken nextEntity;
|
|
||||||
nextEntity = new ResourceIndexedSearchParamToken(myPartitionSettings, theResourceType, theSearchParam.getName(), system, value);
|
nextEntity = new ResourceIndexedSearchParamToken(myPartitionSettings, theResourceType, theSearchParam.getName(), system, value);
|
||||||
theParams.add(nextEntity);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return nextEntity;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
package ca.uhn.fhir.jpa.searchparam.extractor;
|
package ca.uhn.fhir.jpa.searchparam.extractor;
|
||||||
|
|
||||||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||||
|
import ca.uhn.fhir.jpa.model.entity.*;
|
||||||
|
import org.hl7.fhir.instance.model.api.IBase;
|
||||||
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
|
import ca.uhn.fhir.jpa.model.entity.BaseResourceIndexedSearchParam;
|
||||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
|
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamDate;
|
||||||
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
|
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamNumber;
|
||||||
|
@ -11,6 +13,7 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||||
|
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Collections;
|
import java.util.Collections;
|
||||||
|
import java.util.Date;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
|
||||||
|
@ -60,6 +63,23 @@ public interface ISearchParamExtractor {
|
||||||
|
|
||||||
List<String> extractParamValuesAsStrings(RuntimeSearchParam theActiveSearchParam, IBaseResource theResource);
|
List<String> extractParamValuesAsStrings(RuntimeSearchParam theActiveSearchParam, IBaseResource theResource);
|
||||||
|
|
||||||
|
List<IBase> extractValues(String thePaths, IBaseResource theResource);
|
||||||
|
|
||||||
|
String toRootTypeName(IBase nextObject);
|
||||||
|
|
||||||
|
String toTypeName(IBase nextObject);
|
||||||
|
|
||||||
|
PathAndRef extractReferenceLinkFromResource(IBase theValue, String thePath);
|
||||||
|
|
||||||
|
Date extractDateFromResource(IBase theValue, String thePath);
|
||||||
|
|
||||||
|
ResourceIndexedSearchParamToken createSearchParamForCoding(String theResourceType, RuntimeSearchParam theSearchParam, IBase theValue);
|
||||||
|
|
||||||
|
String getDisplayTextForCoding(IBase theValue);
|
||||||
|
|
||||||
|
List<IBase> getCodingsFromCodeableConcept(IBase theValue);
|
||||||
|
|
||||||
|
String getDisplayTextFromCodeableConcept(IBase theValue);
|
||||||
|
|
||||||
class SearchParamSet<T> extends HashSet<T> {
|
class SearchParamSet<T> extends HashSet<T> {
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,115 @@
|
||||||
|
package ca.uhn.fhir.jpa.searchparam.util;
|
||||||
|
|
||||||
|
import ca.uhn.fhir.context.FhirContext;
|
||||||
|
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||||
|
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||||
|
|
||||||
|
public class LastNParameterHelper {
|
||||||
|
|
||||||
|
public static boolean isLastNParameter(String theParamName, FhirContext theContext) {
|
||||||
|
if (theParamName == null) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
FhirVersionEnum version = theContext.getVersion().getVersion();
|
||||||
|
|
||||||
|
if (isR5(version) && isLastNParameterR5(theParamName)) {
|
||||||
|
return true;
|
||||||
|
} else if (isR4(version) && isLastNParameterR4(theParamName)) {
|
||||||
|
return true;
|
||||||
|
} else if (isDstu3(version) && isLastNParameterDstu3(theParamName)) {
|
||||||
|
return true;
|
||||||
|
} else {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean isDstu3(FhirVersionEnum theVersion) {
|
||||||
|
return (theVersion == FhirVersionEnum.DSTU3);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean isR4(FhirVersionEnum theVersion) {
|
||||||
|
return (theVersion == FhirVersionEnum.R4);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean isR5(FhirVersionEnum theVersion) {
|
||||||
|
return (theVersion == FhirVersionEnum.R5);
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean isLastNParameterDstu3(String theParamName) {
|
||||||
|
return (theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_PATIENT)
|
||||||
|
|| theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.dstu3.model.Observation.SP_CODE));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean isLastNParameterR4(String theParamName) {
|
||||||
|
return (theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_PATIENT)
|
||||||
|
|| theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r4.model.Observation.SP_CODE));
|
||||||
|
}
|
||||||
|
|
||||||
|
private static boolean isLastNParameterR5(String theParamName) {
|
||||||
|
return (theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_SUBJECT) || theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_PATIENT)
|
||||||
|
|| theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_CATEGORY) || theParamName.equals(org.hl7.fhir.r5.model.Observation.SP_CODE));
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getSubjectParamName(FhirContext theContext) {
|
||||||
|
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
|
||||||
|
return org.hl7.fhir.r5.model.Observation.SP_SUBJECT;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
|
||||||
|
return org.hl7.fhir.r4.model.Observation.SP_SUBJECT;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
|
||||||
|
return org.hl7.fhir.dstu3.model.Observation.SP_SUBJECT;
|
||||||
|
} else {
|
||||||
|
throw new InvalidRequestException("$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getPatientParamName(FhirContext theContext) {
|
||||||
|
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
|
||||||
|
return org.hl7.fhir.r5.model.Observation.SP_PATIENT;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
|
||||||
|
return org.hl7.fhir.r4.model.Observation.SP_PATIENT;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
|
||||||
|
return org.hl7.fhir.dstu3.model.Observation.SP_PATIENT;
|
||||||
|
} else {
|
||||||
|
throw new InvalidRequestException("$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getEffectiveParamName(FhirContext theContext) {
|
||||||
|
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
|
||||||
|
return org.hl7.fhir.r5.model.Observation.SP_DATE;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
|
||||||
|
return org.hl7.fhir.r4.model.Observation.SP_DATE;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
|
||||||
|
return org.hl7.fhir.dstu3.model.Observation.SP_DATE;
|
||||||
|
} else {
|
||||||
|
throw new InvalidRequestException("$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getCategoryParamName(FhirContext theContext) {
|
||||||
|
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
|
||||||
|
return org.hl7.fhir.r5.model.Observation.SP_CATEGORY;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
|
||||||
|
return org.hl7.fhir.r4.model.Observation.SP_CATEGORY;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
|
||||||
|
return org.hl7.fhir.dstu3.model.Observation.SP_CATEGORY;
|
||||||
|
} else {
|
||||||
|
throw new InvalidRequestException("$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static String getCodeParamName(FhirContext theContext) {
|
||||||
|
if (theContext.getVersion().getVersion() == FhirVersionEnum.R5) {
|
||||||
|
return org.hl7.fhir.r5.model.Observation.SP_CODE;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.R4) {
|
||||||
|
return org.hl7.fhir.r4.model.Observation.SP_CODE;
|
||||||
|
} else if (theContext.getVersion().getVersion() == FhirVersionEnum.DSTU3) {
|
||||||
|
return org.hl7.fhir.dstu3.model.Observation.SP_CODE;
|
||||||
|
} else {
|
||||||
|
throw new InvalidRequestException("$lastn operation is not implemented for FHIR Version " + theContext.getVersion().getVersion().getFhirVersionString());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
}
|
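A short sketch of how calling code might use the helper above to validate incoming $lastn parameters; the parameter value checked here is illustrative:

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.searchparam.util.LastNParameterHelper;

public class LastNParameterHelperSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// "category" is a supported $lastn parameter for R4, so this prints true
		System.out.println(LastNParameterHelper.isLastNParameter("category", ctx));
		// Resolves to "subject" for R4
		System.out.println(LastNParameterHelper.getSubjectParamName(ctx));
	}
}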
|
@ -41,6 +41,7 @@ import javax.mail.internet.MimeMessage;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Date;
|
import java.util.Date;
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
|
import java.util.Properties;
|
||||||
|
|
||||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||||
import static org.apache.commons.lang3.StringUtils.trim;
|
import static org.apache.commons.lang3.StringUtils.trim;
|
||||||
|
@ -53,6 +54,7 @@ public class JavaMailEmailSender implements IEmailSender {
|
||||||
private JavaMailSenderImpl mySender;
|
private JavaMailSenderImpl mySender;
|
||||||
private String mySmtpServerUsername;
|
private String mySmtpServerUsername;
|
||||||
private String mySmtpServerPassword;
|
private String mySmtpServerPassword;
|
||||||
|
private final Properties myJavaMailProperties = new Properties();
|
||||||
|
|
||||||
public String getSmtpServerHostname() {
|
public String getSmtpServerHostname() {
|
||||||
return mySmtpServerHostname;
|
return mySmtpServerHostname;
|
||||||
|
@ -92,6 +94,38 @@ public class JavaMailEmailSender implements IEmailSender {
|
||||||
mySmtpServerUsername = theSmtpServerUsername;
|
mySmtpServerUsername = theSmtpServerUsername;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the "mail.smtp.auth" Java Mail Property
|
||||||
|
*/
|
||||||
|
|
||||||
|
public void setAuth(Boolean theAuth) {
|
||||||
|
myJavaMailProperties.setProperty("mail.smtp.auth", theAuth.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the "mail.smtp.starttls.enable" Java Mail Property
|
||||||
|
*/
|
||||||
|
|
||||||
|
public void setStartTlsEnable(Boolean theStartTlsEnable) {
|
||||||
|
myJavaMailProperties.setProperty("mail.smtp.starttls.enable", theStartTlsEnable.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the "mail.smtp.starttls.required" Java Mail Property
|
||||||
|
*/
|
||||||
|
|
||||||
|
public void setStartTlsRequired(Boolean theStartTlsRequired) {
|
||||||
|
myJavaMailProperties.setProperty("mail.smtp.starttls.required", theStartTlsRequired.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the "mail.smtp.quitwait" Java Mail Property
|
||||||
|
*/
|
||||||
|
|
||||||
|
public void setQuitWait(Boolean theQuitWait) {
|
||||||
|
myJavaMailProperties.setProperty("mail.smtp.quitwait", theQuitWait.toString());
|
||||||
|
}
|
||||||
|
|
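A configuration sketch for an authenticated STARTTLS relay using the new property setters above; the host and credentials are placeholders, and the sender's package is assumed:

import ca.uhn.fhir.jpa.subscription.module.subscriber.email.JavaMailEmailSender; // package assumed

public class EmailSenderConfigSketch {
	public static void main(String[] args) {
		JavaMailEmailSender sender = new JavaMailEmailSender();
		sender.setSmtpServerHostname("smtp.example.org");   // placeholder relay
		sender.setSmtpServerUsername("fhir-notifications"); // placeholder credentials
		sender.setSmtpServerPassword("changeit");
		sender.setAuth(true);             // mail.smtp.auth
		sender.setStartTlsEnable(true);   // mail.smtp.starttls.enable
		sender.setStartTlsRequired(true); // mail.smtp.starttls.required
	}
}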
||||||
@Override
|
@Override
|
||||||
public void send(EmailDetails theDetails) {
|
public void send(EmailDetails theDetails) {
|
||||||
String subscriptionId = theDetails.getSubscription().toUnqualifiedVersionless().getValue();
|
String subscriptionId = theDetails.getSubscription().toUnqualifiedVersionless().getValue();
|
||||||
|
@ -144,6 +178,7 @@ public class JavaMailEmailSender implements IEmailSender {
|
||||||
mySender.setUsername(getSmtpServerUsername());
|
mySender.setUsername(getSmtpServerUsername());
|
||||||
mySender.setPassword(getSmtpServerPassword());
|
mySender.setPassword(getSmtpServerPassword());
|
||||||
mySender.setDefaultEncoding(Constants.CHARSET_UTF8.name());
|
mySender.setDefaultEncoding(Constants.CHARSET_UTF8.name());
|
||||||
|
mySender.setJavaMailProperties(myJavaMailProperties);
|
||||||
}
|
}
|
||||||
|
|
||||||
private static String toTrimmedCommaSeparatedString(List<String> theTo) {
|
private static String toTrimmedCommaSeparatedString(List<String> theTo) {
|
||||||
|
|
|
@@ -24,7 +24,7 @@ import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
 public class ${className}ResourceProvider extends
 ## We have specialized base classes for RPs that handle certain resource types. These
 ## RPs implement type specific operations
-#if ( $version != 'dstu' && (${className} == 'Encounter' || ${className} == 'Patient' || ${className} == 'ValueSet' || ${className} == 'QuestionnaireAnswers' || ${className} == 'CodeSystem' || ($version != 'dstu2' && ${className} == 'ConceptMap') || ${className} == 'MessageHeader' || ${className} == 'Composition' || ${className} == 'StructureDefinition'))
+#if ( $version != 'dstu' && (${className} == 'Encounter' || ${className} == 'Patient' || ${className} == 'ValueSet' || ${className} == 'QuestionnaireAnswers' || ${className} == 'CodeSystem' || ($version != 'dstu2' && ${className} == 'ConceptMap') || ${className} == 'MessageHeader' || ${className} == 'Composition' || ${className} == 'StructureDefinition' || ($version != 'dstu2' && ${className} == 'Observation') ))
    BaseJpaResourceProvider${className}${versionCapitalized}
 #else
    JpaResourceProvider${versionCapitalized}<${className}>
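For context, this Velocity template drives generation of the JPA resource providers. With className = Observation on a post-DSTU2 version such as R4, the generated provider now extends the specialized base class rather than the generic JpaResourceProvider. A rough sketch of the expanded output, where the concrete class names simply follow the template's naming pattern and are assumptions here:

```java
// Hypothetical generator output for className=Observation, versionCapitalized=R4.
public class ObservationResourceProvider extends BaseJpaResourceProviderObservationR4 {
   // generated search methods and type-specific operations ...
}
```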
@@ -50,6 +50,8 @@
       class="ca.uhn.fhir.jpa.dao.FhirResourceDao${res.name}${versionCapitalized}">
 #elseif ( ${versionCapitalized} != 'Dstu1' && ( ${res.name} == 'Bundle' || ${res.name} == 'Encounter' || ${res.name} == 'Composition' || ${res.name} == 'Everything' || ${res.name} == 'Patient' || ${res.name} == 'Subscription' || ${res.name} == 'ValueSet' || ${res.name} == 'QuestionnaireResponse' || ${res.name} == 'SearchParameter'))
       class="ca.uhn.fhir.jpa.dao.FhirResourceDao${res.name}${versionCapitalized}">
+#elseif ( ${versionCapitalized} != 'Dstu1' && ${versionCapitalized} != 'Dstu2' && ${res.name} == 'Observation')
+      class="ca.uhn.fhir.jpa.dao.FhirResourceDao${res.name}${versionCapitalized}">
 #else
       class="ca.uhn.fhir.jpa.dao.FhirResourceDao${versionCapitalized}">
 #end
@@ -69,6 +69,8 @@ public abstract class BaseJavaConfig${versionCapitalized} extends ca.uhn.fhir.jp
       IFhirResourceDaoConceptMap<org.hl7.fhir.${version}.model.ConceptMap>
 #elseif ( ${versionCapitalized} != 'Dstu1' && (${res.name} == 'Composition' || ${res.name} == 'Encounter' || ${res.name} == 'Everything' || ${res.name} == 'Patient' || ${res.name} == 'Subscription' || ${res.name} == 'SearchParameter' || ${res.name} == 'MessageHeader' || ${res.name} == 'StructureDefinition'))
       IFhirResourceDao${res.name}<${resourcePackage}.${res.declaringClassNameComplete}>
+#elseif ( ${versionCapitalized} != 'Dstu1' && ${versionCapitalized} != 'Dstu2' && (${res.name} == 'Observation'))
+      IFhirResourceDao${res.name}<${resourcePackage}.${res.declaringClassNameComplete}>
 #else
       IFhirResourceDao<${resourcePackage}.${res.declaringClassNameComplete}>
 #end
@@ -83,6 +85,9 @@ public abstract class BaseJavaConfig${versionCapitalized} extends ca.uhn.fhir.jp
 #elseif ( ${res.name} == 'Bundle' || ${res.name} == 'Encounter' || ${res.name} == 'Everything' || ${res.name} == 'Patient' || ${res.name} == 'Subscription' || ${res.name} == 'ValueSet' || ${res.name} == 'QuestionnaireResponse' || ${res.name} == 'SearchParameter' || ${res.name} == 'CodeSystem' || ${res.name} == 'MessageHeader' || ${res.name} == 'Composition' || ${res.name} == 'StructureDefinition')
       ca.uhn.fhir.jpa.dao${package_suffix}.FhirResourceDao${res.name}${versionCapitalized} retVal;
       retVal = new ca.uhn.fhir.jpa.dao${package_suffix}.FhirResourceDao${res.name}${versionCapitalized}();
+#elseif ( ${versionCapitalized} != 'Dstu1' && ${versionCapitalized} != 'Dstu2' && ${res.name} == 'Observation')
+      ca.uhn.fhir.jpa.dao${package_suffix}.FhirResourceDao${res.name}${versionCapitalized} retVal;
+      retVal = new ca.uhn.fhir.jpa.dao${package_suffix}.FhirResourceDao${res.name}${versionCapitalized}();
 #else
       ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao<${resourcePackage}.${res.declaringClassNameComplete}> retVal;
       retVal = new ca.uhn.fhir.jpa.dao.JpaResourceDao<${resourcePackage}.${res.declaringClassNameComplete}>();
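The two BaseJavaConfig hunks above (@ -69 and @ -83) give post-DSTU2 versions a dedicated Observation DAO in the generated Java configuration, mirroring the earlier XML template change. As an illustration only, for res.name = Observation and version r4 (so package_suffix resolves to .r4 and resourcePackage to org.hl7.fhir.r4.model), the expanded branches would produce roughly the following; the surrounding bean-method scaffolding is omitted and the concrete names are assumptions:

```java
// Hypothetical expansion of the added #elseif branches for Observation / R4.
// The return type comes from the @ -69 hunk, the body from the @ -83 hunk.
public IFhirResourceDaoObservation<org.hl7.fhir.r4.model.Observation> daoObservationR4() {
   ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoObservationR4 retVal;
   retVal = new ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoObservationR4();
   return retVal;
}
```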
pom.xml (23 changed lines)
@@ -1644,7 +1644,7 @@
          <plugin>
             <groupId>org.basepom.maven</groupId>
             <artifactId>duplicate-finder-maven-plugin</artifactId>
-            <version>1.3.0</version>
+            <version>1.4.0</version>
          </plugin>
          <plugin>
             <groupId>de.jpdigital</groupId>
@@ -2043,26 +2043,6 @@
             </execution>
          </executions>
       </plugin>
-      <plugin>
-         <groupId>org.apache.maven.plugins</groupId>
-         <artifactId>maven-enforcer-plugin</artifactId>
-         <executions>
-            <execution>
-               <id>enforce-property</id>
-               <goals>
-                  <goal>enforce</goal>
-               </goals>
-               <configuration>
-                  <rules>
-                     <requireJavaVersion>
-                        <version>11</version>
-                     </requireJavaVersion>
-                  </rules>
-                  <fail>true</fail>
-               </configuration>
-            </execution>
-         </executions>
-      </plugin>
       <plugin>
          <artifactId>maven-antrun-plugin</artifactId>
          <inherited>false</inherited>
@@ -2578,6 +2558,7 @@
       <module>hapi-fhir-structures-r5</module>
       <module>hapi-fhir-validation-resources-r5</module>
       <module>hapi-fhir-igpacks</module>
+      <module>hapi-fhir-elasticsearch-6</module>
       <module>hapi-fhir-jpaserver-api</module>
       <module>hapi-fhir-jpaserver-model</module>
       <module>hapi-fhir-jpaserver-searchparam</module>