Merge remote-tracking branch 'origin/master' into im_2021_01_04_package_load_with_partitioning
commit e23e3c660c
@@ -17,6 +11,11 @@ jobs:
timeoutInMinutes: 360
container: maven:3-jdk-11
steps:
- task: DockerInstaller@0
displayName: Docker Installer
inputs:
dockerVersion: 17.09.0-ce
releaseType: stable
- task: Cache@2
inputs:
key: 'maven | "$(Agent.OS)" | ./pom.xml'
@@ -35,8 +40,24 @@ jobs:
# These are JVM options (and don't show up in the build logs)
mavenOptions: '-Xmx1024m $(MAVEN_OPTS) -Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS -Duser.timezone=America/Toronto'
jdkVersionOption: 1.11
- task: CopyFiles@2
condition: always()
inputs:
sourceFolder: '$(System.DefaultWorkingDirectory)/'
contents: '**/target/*output.txt'
targetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishPipelineArtifact@1
displayName: 'Publish Full Test Output'
condition: always()
inputs:
targetPath: '$(Build.ArtifactStagingDirectory)/'
artifactName: 'full_logs.zip'
- script: bash <(curl https://codecov.io/bash) -t $(CODECOV_TOKEN)
displayName: 'codecov'
- task: PublishTestResults@2
inputs:
testResultsFormat: 'JUnit'
testResultsFiles: '**/TEST-*.xml'
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: 'JaCoCo'
@@ -144,6 +144,7 @@ public class StringClientParam extends BaseClientParam implements IParam {
return new StringCriterion<>(getParamName(), theValue);
}

@Override
public ICriterion<StringClientParam> value(IPrimitiveType<String> theValue) {
return new StringCriterion<>(getParamName(), theValue.getValue());
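The new overload accepts a FHIR primitive and unwraps it with `getValue()`. A minimal sketch of how the two overloads might be used from the fluent client; the endpoint URL is illustrative, and `Patient.FAMILY` is the standard generated `StringClientParam` constant:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Bundle;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.StringType;

public class StringClientParamExample {
   public static void main(String[] args) {
      // Illustrative endpoint, not part of the commit
      IGenericClient client = FhirContext.forR4()
         .newRestfulGenericClient("http://hapi.fhir.org/baseR4");

      // Existing overload: criterion from a plain String
      Bundle byString = client.search().forResource(Patient.class)
         .where(Patient.FAMILY.value("Smith"))
         .returnBundle(Bundle.class).execute();

      // New overload: criterion from an IPrimitiveType<String>
      Bundle byPrimitive = client.search().forResource(Patient.class)
         .where(Patient.FAMILY.value(new StringType("Smith")))
         .returnBundle(Bundle.class).execute();
   }
}
```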
@@ -254,8 +254,13 @@
<artifactId>awaitility</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.rauschig</groupId>
<artifactId>jarchivelib</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</dependencies>

<build>
<plugins>
@@ -23,10 +23,14 @@ package ca.uhn.fhir.jpa.demo;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
import org.apache.commons.dbcp2.BasicDataSource;
import org.apache.commons.lang3.time.DateUtils;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -81,6 +85,8 @@ public class CommonConfig {
@Bean
public Properties jpaProperties() {
Properties extraProperties = new Properties();

//Regular Hibernate Settings
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.format_sql", "true");
extraProperties.put("hibernate.show_sql", "false");
@@ -90,14 +96,13 @@ public class CommonConfig {
extraProperties.put("hibernate.cache.use_second_level_cache", "false");
extraProperties.put("hibernate.cache.use_structured_entries", "false");
extraProperties.put("hibernate.cache.use_minimal_puts", "false");
extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "filesystem");
extraProperties.put("hibernate.search.default.indexBase", "target/lucenefiles");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");
extraProperties.put("hibernate.search.default.worker.execution", "async");

extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem");
extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), "target/lucenefiles");
extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
if (System.getProperty("lowmem") != null) {
extraProperties.put("hibernate.search.autoregister_listeners", "false");
extraProperties.put(HibernateOrmMapperSettings.ENABLED, "false");
}

return extraProperties;
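For context on the new keys: `BackendSettings.backendKey(...)` simply prefixes the given radical with the default-backend namespace, so the Hibernate Search 6 puts above resolve to ordinary property names. A small sketch showing what these constants resolve to:

```java
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;

public class BackendKeyExample {
   public static void main(String[] args) {
      // backendKey(radical) -> "hibernate.search.backend." + radical
      System.out.println(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE));
      // hibernate.search.backend.directory.type
      System.out.println(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT));
      // hibernate.search.backend.directory.root
      System.out.println(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION));
      // hibernate.search.backend.lucene_version
   }
}
```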
@@ -25,6 +25,7 @@ import ca.uhn.fhir.interceptor.executor.InterceptorService;
import ca.uhn.fhir.jpa.interceptor.validation.IRepositoryValidatingRule;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import org.springframework.context.ApplicationContext;

import java.util.List;
@@ -88,6 +89,28 @@ public class RepositoryValidatingInterceptorExamples {
//END SNIPPET: requireValidationToDeclaredProfiles
}

public void requireValidationToDeclaredProfilesAdjustThreshold() {
RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class);

//START SNIPPET: requireValidationToDeclaredProfilesAdjustThreshold
ruleBuilder
.forResourcesOfType("Patient")
.requireValidationToDeclaredProfiles()
.rejectOnSeverity(ResultSeverityEnum.WARNING);
//END SNIPPET: requireValidationToDeclaredProfilesAdjustThreshold
}

public void requireValidationToDeclaredProfilesTagOnFailure() {
RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class);

//START SNIPPET: requireValidationToDeclaredProfilesTagOnFailure
ruleBuilder
.forResourcesOfType("Patient")
.requireValidationToDeclaredProfiles()
.dontReject()
.tagOnSeverity(ResultSeverityEnum.ERROR, "http://example.com", "validation-failure");
//END SNIPPET: requireValidationToDeclaredProfilesTagOnFailure
}

public void disallowProfiles() {
RepositoryValidatingRuleBuilder ruleBuilder = myAppCtx.getBean(RepositoryValidatingRuleBuilder.class);
@@ -0,0 +1,5 @@
---
type: change
issue: 2190
title: "Updates to Hibernate Search require a full reindexing of all indexed fulltext data, which is held in Lucene or Elasticsearch.
Users using elasticsearch for fulltext indexing must upgrade to Elasticsearch 7.10.0."
@@ -0,0 +1,5 @@
---
type: fix
issue: 2262
title: "Sorting of search results was not working for MySQL, MSSQL and MariaDB due to recent changes made to handle sorting
of nullable columns. This has now been fixed."
@@ -0,0 +1,6 @@
---
type: fix
issue: 2269
title: "A database index in the JPA server was added in HAPI FHIR 5.2.0 and Smile CDR 2020.11 that
exceeded the maximum index length in MySQL, preventing server upgrades on that database platform.
This has been corrected."
@@ -6,9 +6,14 @@
<ul>
<li>SLF4j (All Modules): 1.7.28 -> 1.7.30</li>
<li>Woodstox (XML FHIR Parser): 4.4.1 -> 6.2.3 (Note that the Maven groupId has changed from <code>org.codehaus.woodstox</code> to <code>com.fasterxml.woodstox</code> and the Maven artifactId has changed from <code>woodstox-core-asl</code> to <code>woodstox-core</code> for this library)</li>
<li>Hibernate ORM (JPA): 5.4.22 -> 5.4.26</li>
<li>Spring (JPA): 5.2.3.RELEASE -> 5.2.9.RELEASE</li>
<li>Datasource-Proxy (JPA): 1.5.1 -> 1.7</li>
<li>Jetty (JPA Starter): 9.4.30.v20200611 -> 9.4.35.v20201120</li>
<li>Guava (JP): 29.0-jre -> 30.1-jre</li>
<li>Hibernate ORM (JPA Server): 5.4.22.FINAL -> 5.4.26.FINAL</li>
<li>Spring (JPA Server): 5.2.9.RELEASE -> 5.3.2</li>
<li>Spring Data (JPA Server): 2.2.0.RELEASE -> 2.4.2</li>
<li>Hibernate Search (JPA Server): 5.11.5.FINAL -> 6.0.0.Final</li>
<li>Lucene (HAPI FHIR JPA Server): 5.5.5 -> 8.7.0</li>
<li>Spring Boot (JPA Starter): 2.2.6.RELEASE -> 2.4.1</li>
</ul>"
@@ -16,7 +16,7 @@ As described in the [FHIR specification](http://hl7.org/fhir/observation-operati

# Limitations

Currently only Elasticsearch versions up to 6.5.4 are supported.
Currently only Elasticsearch version 7.10.0 is officially supported.

Search parameters other than those listed above are currently not supported.
@@ -76,6 +76,24 @@ This rule is generally combined with the *Require Profile Declarations* above.

Any resource creates or updates that do not conform to the given profile will be rejected.

## Adjusting Failure Threshold

By default, any validation messages with a severity value of *ERROR* or *FATAL* will result in resource creates or updates being rejected. This threshold can be adjusted, however:

```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/RepositoryValidatingInterceptorExamples.java|requireValidationToDeclaredProfilesAdjustThreshold}}
```

## Tagging Validation Failures

By default, resource updates/changes resulting in failing validation will cause the operation to be rolled back. You can alternately configure the rule to allow the change to proceed but add an arbitrary tag to the resource when it is saved.

```java
{{snippet:classpath:/ca/uhn/hapi/fhir/docs/RepositoryValidatingInterceptorExamples.java|requireValidationToDeclaredProfilesTagOnFailure}}
```
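To tie the rule snippets above together, here is a sketch of how the built rules might be registered against a server. The `myAppCtx`, `myFhirContext`, and `myInterceptorService` fields are assumptions standing in for whatever wiring the application already uses:

```java
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.api.IInterceptorService;
import ca.uhn.fhir.jpa.interceptor.validation.IRepositoryValidatingRule;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingInterceptor;
import ca.uhn.fhir.jpa.interceptor.validation.RepositoryValidatingRuleBuilder;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import org.springframework.context.ApplicationContext;

import java.util.List;

public class RepositoryValidatingWiringSketch {
   private ApplicationContext myAppCtx;               // assumed injected
   private FhirContext myFhirContext;                 // assumed injected
   private IInterceptorService myInterceptorService;  // assumed injected

   public void wireRepositoryValidation() {
      RepositoryValidatingRuleBuilder ruleBuilder =
         myAppCtx.getBean(RepositoryValidatingRuleBuilder.class);
      ruleBuilder
         .forResourcesOfType("Patient")
         .requireValidationToDeclaredProfiles()
         .rejectOnSeverity(ResultSeverityEnum.WARNING);

      // Build the rule list and attach it to the server's interceptor chain
      List<IRepositoryValidatingRule> rules = ruleBuilder.build();
      myInterceptorService.registerInterceptor(
         new RepositoryValidatingInterceptor(myFhirContext, rules));
   }
}
```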

# Rules: Disallow Specific Profiles

Rules can declare that a specific profile is not allowed.
@@ -1,144 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-deployable-pom</artifactId>
<version>5.3.0-SNAPSHOT</version>
<relativePath>../hapi-deployable-pom/pom.xml</relativePath>
</parent>
<artifactId>hapi-fhir-elasticsearch-6</artifactId>
<name>hapi-fhir-elasticsearch-6</name>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- Elasticsearch -->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>6.5.4</version>
<exclusions>
<!-- The following all need to be excluded to avoid conflicts with Hibernate-Search -->
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.spullara.mustache.java</groupId>
<artifactId>compiler</artifactId>
</exclusion>
<exclusion>
<groupId>com.tdunning</groupId>
<artifactId>t-digest</artifactId>
</exclusion>
<exclusion>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
</exclusion>
<exclusion>
<groupId>net.sf.jopt-simple</groupId>
<artifactId>jopt-simple</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-sandbox</artifactId>
</exclusion>
<exclusion>
<groupId>org.elasticsearch</groupId>
<artifactId>jna</artifactId>
</exclusion>
<exclusion>
<groupId>org.hdrhistogram</groupId>
<artifactId>HdrHistogram</artifactId>
</exclusion>
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>shaded6</shadedClassifierName> <!-- Any name that makes sense -->
<relocations>
<relocation>
<pattern>com.carrotsearch.hppc</pattern>
<shadedPattern>com.shadehapi.carrotsearch.hppc</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.logging.log4j</pattern>
<shadedPattern>org.shadehapi.apache.logging.log4j</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.lucene</pattern>
<shadedPattern>org.shadehapi.apache.lucene</shadedPattern>
</relocation>
<relocation>
<pattern>org.elasticsearch</pattern>
<shadedPattern>org.shadehapi.elasticsearch</shadedPattern>
</relocation>
<reloaction>
<pattern>org.joda</pattern>
<shadedPattern>org.shadehapi.joda</shadedPattern>
</reloaction>
</relocations>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
@@ -89,8 +89,12 @@
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-search-orm</artifactId>
<groupId>org.hibernate.search</groupId>
<artifactId>hibernate-search-mapper-orm</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate.search</groupId>
<artifactId>hibernate-search-backend-elasticsearch</artifactId>
</dependency>

<!-- Spring -->
@@ -150,18 +150,6 @@
<artifactId>hapi-fhir-jpaserver-batch</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-elasticsearch-6</artifactId>
<version>${project.version}</version>
<classifier>shaded6</classifier>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>

<dependency>
<groupId>net.ttddyy</groupId>
@@ -478,30 +466,48 @@
<artifactId>javax.el</artifactId>
</dependency>

<!-- Note that we need this dependency to send log4j logging requests to slf4j -->
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-to-slf4j</artifactId>
</dependency>
<!-- Hibernate Search -->
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-search-orm</artifactId>
<groupId>org.hibernate.search</groupId>
<artifactId>hibernate-search-mapper-orm</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-highlighter</artifactId>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<exclusions>
<exclusion>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-api</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.hibernate.search</groupId>
<artifactId>hibernate-search-backend-elasticsearch</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate.search</groupId>
<artifactId>hibernate-search-backend-lucene</artifactId>
</dependency>
<dependency>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-phonetic</artifactId>
</dependency>
<dependency>
<groupId>org.hibernate</groupId>
<artifactId>hibernate-search-elasticsearch</artifactId>
<exclusions>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
</exclusions>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
</dependency>

<!-- Misc -->
<dependency>
<groupId>com.google.guava</groupId>
@@ -579,9 +585,24 @@
<scope>test</scope>
</dependency>
<dependency>
<groupId>pl.allegro.tech</groupId>
<artifactId>embedded-elasticsearch</artifactId>
<version>2.10.0</version>
<groupId>org.hamcrest</groupId>
<artifactId>hamcrest</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>elasticsearch</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.testcontainers</groupId>
<artifactId>junit-jupiter</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.hl7.fhir.testcases</groupId>
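The embedded-elasticsearch test dependency is replaced by Testcontainers here. A minimal sketch of spinning up the containerized Elasticsearch these tests rely on; the image tag mirrors the 7.10.0 version named in the changelog and is an assumption on my part:

```java
import org.testcontainers.elasticsearch.ElasticsearchContainer;

public class ElasticsearchContainerSketch {
   public static void main(String[] args) {
      // Image tag assumed to match the supported Elasticsearch version
      try (ElasticsearchContainer elastic = new ElasticsearchContainer(
            "docker.elastic.co/elasticsearch/elasticsearch:7.10.0")) {
         elastic.start();
         // Hand this host:port to the Hibernate Search Elasticsearch backend
         System.out.println("Elasticsearch at http://" + elastic.getHttpHostAddress());
      }
   }
}
```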
@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome;
import ca.uhn.fhir.jpa.batch.log.Logs;
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity;
@@ -100,10 +101,10 @@ public class ResourceToFileWriter implements ItemWriter<List<IBaseResource>> {
binary.setContentType(Constants.CT_FHIR_NDJSON);
binary.setContent(myOutputStream.toByteArray());

return myBinaryDao.create(binary).getResource().getIdElement();
DaoMethodOutcome outcome = myBinaryDao.create(binary);
return outcome.getResource().getIdElement();
}

@SuppressWarnings("unchecked")
private IFhirResourceDao<IBaseBinary> getBinaryDao() {
return myDaoRegistry.getResourceDao("Binary");
@@ -473,8 +473,8 @@ public abstract class BaseConfig {
}

@Bean
public HibernateDialectProvider hibernateDialectProvider() {
return new HibernateDialectProvider();
public HibernatePropertiesProvider HibernatePropertiesProvider() {
return new HibernatePropertiesProvider();
}

@Bean
@@ -21,17 +21,20 @@ package ca.uhn.fhir.jpa.config;
*/

import ca.uhn.fhir.util.ReflectionUtil;
import org.apache.commons.lang3.StringUtils;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.hibernate.dialect.Dialect;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;

public class HibernateDialectProvider {
public class HibernatePropertiesProvider {

@Autowired
private LocalContainerEntityManagerFactoryBean myEntityManagerFactory;
private Dialect myDialect;
private String myHibernateSearchBackend;

@VisibleForTesting
public void setDialectForUnitTest(Dialect theDialect) {
@@ -49,4 +52,13 @@ public class HibernateDialectProvider {
return dialect;
}

public String getHibernateSearchBackend(){
String hibernateSearchBackend = myHibernateSearchBackend;
if (StringUtils.isBlank(hibernateSearchBackend)) {
hibernateSearchBackend = (String) myEntityManagerFactory.getJpaPropertyMap().get(BackendSettings.backendKey(BackendSettings.TYPE));
Validate.notNull(hibernateSearchBackend, BackendSettings.backendKey(BackendSettings.TYPE) + " property is unset!");
myHibernateSearchBackend = hibernateSearchBackend;
}
return myHibernateSearchBackend;
}
}
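The renamed provider now also resolves which Hibernate Search backend is configured, lazily caching the value it reads from the JPA property map. A sketch of the property it looks up; the `"lucene"` value is one of the two backends this build wires in:

```java
import java.util.Properties;
import org.hibernate.search.engine.cfg.BackendSettings;

public class BackendTypeLookupSketch {
   public static void main(String[] args) {
      Properties jpaProperties = new Properties();
      // BackendSettings.backendKey(BackendSettings.TYPE) == "hibernate.search.backend.type"
      jpaProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");

      // getHibernateSearchBackend() reads exactly this entry and fails if it is unset
      String backend = (String) jpaProperties.get("hibernate.search.backend.type");
      System.out.println(backend); // lucene
   }
}
```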
@@ -1031,8 +1031,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
entity.setDeleted(theDeletedTimestampOrNull);
entity.setUpdated(theDeletedTimestampOrNull);
entity.setNarrativeTextParsedIntoWords(null);
entity.setContentTextParsedIntoWords(null);
entity.setNarrativeText(null);
entity.setContentText(null);
entity.setHashSha256(null);
entity.setIndexStatus(INDEX_STATUS_INDEXED);
changed = populateResourceIntoEntity(theRequest, theResource, entity, true);
@@ -1058,8 +1058,8 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
newParams.populateResourceTableSearchParamsPresentFlags(entity);
entity.setIndexStatus(INDEX_STATUS_INDEXED);
populateFullTextFields(myContext, theResource, entity);
}
populateFullTextFields(myContext, theResource, entity);
} else {

changed = populateResourceIntoEntity(theRequest, theResource, entity, false);
@@ -1481,11 +1481,11 @@ public abstract class BaseHapiFhirDao<T extends IBaseResource> extends BaseStora
public static void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity) {
if (theEntity.getDeleted() != null) {
theEntity.setNarrativeTextParsedIntoWords(null);
theEntity.setContentTextParsedIntoWords(null);
theEntity.setNarrativeText(null);
theEntity.setContentText(null);
} else {
theEntity.setNarrativeTextParsedIntoWords(parseNarrativeTextIntoWords(theResource));
theEntity.setContentTextParsedIntoWords(parseContentTextIntoWords(theContext, theResource));
theEntity.setNarrativeText(parseNarrativeTextIntoWords(theResource));
theEntity.setContentText(parseContentTextIntoWords(theContext, theResource));
}
}
@@ -635,7 +635,6 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
IBaseResource oldVersion = toResource(theEntity, false);

List<TagDefinition> tags = toTagList(theMetaAdd);

for (TagDefinition nextDef : tags) {

boolean hasTag = false;
@@ -663,9 +662,10 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
validateMetaCount(theEntity.getTags().size());

theEntity = myEntityManager.merge(theEntity);
myEntityManager.merge(theEntity);

// Interceptor call: STORAGE_PRECOMMIT_RESOURCE_UPDATED
// Interceptor call: STORAGE_PRESTORAGE_RESOURCE_UPDATED
IBaseResource newVersion = toResource(theEntity, false);
HookParams params = new HookParams()
.add(IBaseResource.class, oldVersion)
@@ -673,6 +673,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(TransactionDetails.class, theTransactionDetails);
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);

}
@@ -681,6 +682,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
IBaseResource oldVersion = toResource(theEntity, false);

List<TagDefinition> tags = toTagList(theMetaDel);

for (TagDefinition nextDef : tags) {
@@ -708,6 +710,7 @@ public abstract class BaseHapiFhirResourceDao<T extends IBaseResource> extends B
.add(RequestDetails.class, theRequestDetails)
.addIfMatchesType(ServletRequestDetails.class, theRequestDetails)
.add(TransactionDetails.class, theTransactionDetails);
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED, params);
myInterceptorBroadcaster.callHooks(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED, params);

}
@@ -148,7 +148,7 @@ public abstract class BaseStorageDao {
}

outcome.setId(id);
if (theEntity.isDeleted() == false) {
if (theEntity.getDeleted() == null) {
outcome.setResource(theResource);
}
outcome.setEntity(theEntity);
@@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.dao;
*/

import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
@@ -41,17 +40,21 @@ import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.Query;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.search.highlight.Formatter;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.Scorer;
import org.apache.lucene.search.highlight.TextFragment;
import org.apache.lucene.search.highlight.TokenGroup;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.FullTextQuery;
import org.hibernate.search.query.dsl.BooleanJunction;
import org.hibernate.search.query.dsl.QueryBuilder;
import org.hibernate.search.backend.lucene.index.LuceneIndexManager;
import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep;
import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory;
import org.hibernate.search.engine.search.query.SearchQuery;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.mapping.SearchMapping;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.jetbrains.annotations.NotNull;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.Transactional;
@@ -60,12 +63,14 @@ import org.springframework.transaction.support.TransactionTemplate;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.isNotBlank;
@@ -74,15 +79,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@PersistenceContext(type = PersistenceContextType.TRANSACTION)
private EntityManager myEntityManager;

@Autowired
private PlatformTransactionManager myTxManager;

@Autowired
protected IForcedIdDao myForcedIdDao;

@Autowired
private DaoConfig myDaoConfig;

@Autowired
private IdHelperService myIdHelperService;
@@ -95,136 +98,81 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
super();
}

private void addTextSearch(QueryBuilder theQueryBuilder, BooleanJunction<?> theBoolean, List<List<IQueryParameterType>> theTerms, String theFieldName, String theFieldNameEdgeNGram, String theFieldNameNGram) {
private void addTextSearch(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, List<List<IQueryParameterType>> theTerms, String theFieldName, String theFieldNameEdgeNGram, String theFieldNameTextNGram){
if (theTerms == null) {
return;
}
for (List<? extends IQueryParameterType> nextAnd : theTerms) {
Set<String> terms = new HashSet<>();
for (IQueryParameterType nextOr : nextAnd) {
StringParam nextOrString = (StringParam) nextOr;
String nextValueTrimmed = StringUtils.defaultString(nextOrString.getValue()).trim();
if (isNotBlank(nextValueTrimmed)) {
terms.add(nextValueTrimmed);
}
}
if (terms.isEmpty() == false) {
Set<String> terms = extractOrStringParams(nextAnd);
if (terms.size() == 1) {
//@formatter:off
Query textQuery = theQueryBuilder
.phrase()
.withSlop(2)
.onField(theFieldName).boostedTo(4.0f)
// .andField(theFieldNameEdgeNGram).boostedTo(2.0f)
// .andField(theFieldNameNGram).boostedTo(1.0f)
.sentence(terms.iterator().next().toLowerCase()).createQuery();
//@formatter:on

theBoolean.must(textQuery);
} else {
b.must(f.phrase()
.field(theFieldName)
.boost(4.0f)
.matching(terms.iterator().next().toLowerCase())
.slop(2));
} else if (terms.size() > 1){
String joinedTerms = StringUtils.join(terms, ' ');
theBoolean.must(theQueryBuilder.keyword().onField(theFieldName).matching(joinedTerms).createQuery());
b.must(f.match().field(theFieldName).matching(joinedTerms));
} else {
ourLog.debug("No Terms found in query parameter {}", nextAnd);
}
}
}

@NotNull
private Set<String> extractOrStringParams(List<? extends IQueryParameterType> nextAnd) {
Set<String> terms = new HashSet<>();
for (IQueryParameterType nextOr : nextAnd) {
StringParam nextOrString = (StringParam) nextOr;
String nextValueTrimmed = StringUtils.defaultString(nextOrString.getValue()).trim();
if (isNotBlank(nextValueTrimmed)) {
terms.add(nextValueTrimmed);
}
}
return terms;
}

private List<ResourcePersistentId> doSearch(String theResourceName, SearchParameterMap theParams, ResourcePersistentId theReferencingPid) {
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);

List<ResourcePersistentId> pids = null;

/*
* Handle textual params
*/
/*
for (String nextParamName : theParams.keySet()) {
for (List<? extends IQueryParameterType> nextAndList : theParams.get(nextParamName)) {
for (Iterator<? extends IQueryParameterType> orIterator = nextAndList.iterator(); orIterator.hasNext();) {
IQueryParameterType nextParam = orIterator.next();
if (nextParam instanceof TokenParam) {
TokenParam nextTokenParam = (TokenParam) nextParam;
if (nextTokenParam.isText()) {
orIterator.remove();
QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get();
BooleanJunction<?> bool = qb.bool();

bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery());
if (isNotBlank(theResourceName)) {
bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
}
//
//@formatter:off
String value = nextTokenParam.getValue().toLowerCase();
bool.must(qb.keyword().onField("myValueTextEdgeNGram").matching(value).createQuery());

//@formatter:on

FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceIndexedSearchParamString.class);

List<?> resultList = ftq.getResultList();
pids = new ArrayList<Long>();
for (Object next : resultList) {
ResourceIndexedSearchParamString nextAsArray = (ResourceIndexedSearchParamString) next;
pids.add(nextAsArray.getResourcePid());
}
}
}
}
}
}

if (pids != null && pids.isEmpty()) {
return pids;
}
*/

QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
BooleanJunction<?> bool = qb.bool();

/*
* Handle _content parameter (resource body content)
*/
SearchSession session = Search.session(myEntityManager);
List<List<IQueryParameterType>> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT);
addTextSearch(qb, bool, contentAndTerms, "myContentText", "myContentTextEdgeNGram", "myContentTextNGram");

/*
* Handle _text parameter (resource narrative content)
*/
List<List<IQueryParameterType>> textAndTerms = theParams.remove(Constants.PARAM_TEXT);
addTextSearch(qb, bool, textAndTerms, "myNarrativeText", "myNarrativeTextEdgeNGram", "myNarrativeTextNGram");

if (theReferencingPid != null) {
bool.must(qb.keyword().onField("myResourceLinksField").matching(theReferencingPid.toString()).createQuery());
}
List<Long> longPids = session.search(ResourceTable.class)
//Selects are replacements for projection and convert more cleanly than the old implementation.
.select(
f -> f.field("myId", Long.class)
)
.where(
f -> f.bool(b -> {
/*
* Handle _content parameter (resource body content)
*/
addTextSearch(f, b, contentAndTerms, "myContentText", "mycontentTextEdgeNGram", "myContentTextNGram");
/*
* Handle _text parameter (resource narrative content)
*/
addTextSearch(f, b, textAndTerms, "myNarrativeText", "myNarrativeTextEdgeNGram", "myNarrativeTextNGram");

if (bool.isEmpty()) {
return pids;
}
if (theReferencingPid != null) {
b.must(f.match().field("myResourceLinksField").matching(theReferencingPid.toString()));
}

if (isNotBlank(theResourceName)) {
bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery());
}
//DROP EARLY HERE IF BOOL IS EMPTY?

Query luceneQuery = bool.createQuery();
if (isNotBlank(theResourceName)) {
b.must(f.match().field("myResourceType").matching(theResourceName));
}
})
).fetchAllHits();

// wrap Lucene query in a javax.persistence.SqlQuery
FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class);
jpaQuery.setProjection("myId");
return convertLongsToResourcePersistentIds(longPids);
}

// execute search
List<?> result = jpaQuery.getResultList();

ArrayList<ResourcePersistentId> retVal = new ArrayList<>();
for (Object object : result) {
Object[] nextArray = (Object[]) object;
Long next = (Long) nextArray[0];
if (next != null) {
retVal.add(new ResourcePersistentId(next));
}
}

return retVal;
private List<ResourcePersistentId> convertLongsToResourcePersistentIds(List<Long> theLongPids) {
return theLongPids.stream()
.map(pid -> new ResourcePersistentId(pid))
.collect(Collectors.toList());
}

@Override
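The hunk above swaps the Hibernate Search 5 `FullTextEntityManager`/`QueryBuilder` API for the Hibernate Search 6 `SearchSession` DSL, where `select()` replaces projections and `where()` replaces `BooleanJunction`. A condensed standalone sketch of the new style; the `myContentText`/`myId` field names follow the code above, and everything else is illustrative:

```java
import java.util.List;
import javax.persistence.EntityManager;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;

public class SearchSessionSketch {
   // Returns the myId field of every entity whose myContentText matches the phrase
   static <T> List<Long> findIds(EntityManager em, Class<T> entityType, String text) {
      SearchSession session = Search.session(em);
      return session.search(entityType)
         // select() replaces the old jpaQuery.setProjection("myId")
         .select(f -> f.field("myId", Long.class))
         // where() + bool() replace BooleanJunction.must(...).createQuery()
         .where(f -> f.bool(b -> b.must(
            f.phrase().field("myContentText").matching(text).slop(2))))
         .fetchAllHits();
   }
}
```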
@@ -259,8 +207,8 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
if (retVal == null) {
retVal = new TransactionTemplate(myTxManager).execute(t -> {
try {
FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();
SearchSession searchSession = Search.session(myEntityManager);
searchSession.search(ResourceTable.class);
return Boolean.FALSE;
} catch (Exception e) {
ourLog.trace("FullText test failed", e);
@@ -287,172 +235,4 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
@Autowired
private PartitionSettings myPartitionSettings;

@Transactional()
@Override
public List<Suggestion> suggestKeywords(String theContext, String theSearchParam, String theText, RequestDetails theRequest) {
Validate.notBlank(theContext, "theContext must be provided");
Validate.notBlank(theSearchParam, "theSearchParam must be provided");
Validate.notBlank(theText, "theSearchParam must be provided");

long start = System.currentTimeMillis();

String[] contextParts = StringUtils.split(theContext, '/');
if (contextParts.length != 3 || "Patient".equals(contextParts[0]) == false || "$everything".equals(contextParts[2]) == false) {
throw new InvalidRequestException("Invalid context: " + theContext);
}

// Partitioning is not supported for this operation
Validate.isTrue(myPartitionSettings.isPartitioningEnabled() == false, "Suggest keywords not supported for partitioned system");
RequestPartitionId requestPartitionId = null;

ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, contextParts[0], contextParts[1]);

FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);

QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get();

Query textQuery = qb
.phrase()
.withSlop(2)
.onField("myContentText").boostedTo(4.0f)
.andField("myContentTextEdgeNGram").boostedTo(2.0f)
.andField("myContentTextNGram").boostedTo(1.0f)
.andField("myContentTextPhonetic").boostedTo(0.5f)
.sentence(theText.toLowerCase()).createQuery();

Query query = qb.bool()
.must(qb.keyword().onField("myResourceLinksField").matching(pid.toString()).createQuery())
.must(textQuery)
.createQuery();

FullTextQuery ftq = em.createFullTextQuery(query, ResourceTable.class);
ftq.setProjection("myContentText");
ftq.setMaxResults(20);

List<?> resultList = ftq.getResultList();
List<Suggestion> suggestions = Lists.newArrayList();
for (Object next : resultList) {
Object[] nextAsArray = (Object[]) next;
String nextValue = (String) nextAsArray[0];

try {
MySuggestionFormatter formatter = new MySuggestionFormatter(theText, suggestions);
Scorer scorer = new QueryScorer(textQuery);
Highlighter highlighter = new Highlighter(formatter, scorer);
Analyzer analyzer = em.getSearchFactory().getAnalyzer(ResourceTable.class);

formatter.setAnalyzer("myContentTextPhonetic");
highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10);

formatter.setAnalyzer("myContentTextNGram");
highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10);

formatter.setFindPhrasesWith();
formatter.setAnalyzer("myContentTextEdgeNGram");
highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10);

} catch (Exception e) {
throw new InternalErrorException(e);
}

}

Collections.sort(suggestions);

Set<String> terms = Sets.newHashSet();
for (Iterator<Suggestion> iter = suggestions.iterator(); iter.hasNext(); ) {
String nextTerm = iter.next().getTerm().toLowerCase();
if (!terms.add(nextTerm)) {
iter.remove();
}
}

long delay = System.currentTimeMillis() - start;
ourLog.info("Provided {} suggestions for term {} in {} ms", terms.size(), theText, delay);

return suggestions;
}

public class MySuggestionFormatter implements Formatter {

private List<Suggestion> mySuggestions;
private String myAnalyzer;
private ArrayList<String> myPartialMatchPhrases;
private ArrayList<Float> myPartialMatchScores;
private String myOriginalSearch;

MySuggestionFormatter(String theOriginalSearch, List<Suggestion> theSuggestions) {
myOriginalSearch = theOriginalSearch;
mySuggestions = theSuggestions;
}

@Override
public String highlightTerm(String theOriginalText, TokenGroup theTokenGroup) {
ourLog.debug("{} Found {} with score {}", myAnalyzer, theOriginalText, theTokenGroup.getTotalScore());
if (theTokenGroup.getTotalScore() > 0) {
float score = theTokenGroup.getTotalScore();
if (theOriginalText.equalsIgnoreCase(myOriginalSearch)) {
score = score + 1.0f;
}
mySuggestions.add(new Suggestion(theOriginalText, score));
} else if (myPartialMatchPhrases != null) {
if (theOriginalText.length() < 100) {
for (int i = 0; i < myPartialMatchPhrases.size(); i++) {
if (theOriginalText.contains(myPartialMatchPhrases.get(i))) {
mySuggestions.add(new Suggestion(theOriginalText, myPartialMatchScores.get(i) - 0.5f));
}
}
}
}

return null;
}

void setAnalyzer(String theString) {
myAnalyzer = theString;
}

void setFindPhrasesWith() {
myPartialMatchPhrases = new ArrayList<>();
myPartialMatchScores = new ArrayList<>();

for (Suggestion next : mySuggestions) {
myPartialMatchPhrases.add(' ' + next.myTerm);
myPartialMatchScores.add(next.myScore);
}

myPartialMatchPhrases.add(myOriginalSearch);
myPartialMatchScores.add(1.0f);
}

}

public static class Suggestion implements Comparable<Suggestion> {
private String myTerm;
private float myScore;

Suggestion(String theTerm, float theScore) {
myTerm = theTerm;
myScore = theScore;
}

@Override
public int compareTo(Suggestion theO) {
return Float.compare(theO.myScore, myScore);
}

public float getScore() {
return myScore;
}

public String getTerm() {
return myTerm;
}

@Override
public String toString() {
return "Suggestion[myTerm=" + myTerm + ", myScore=" + myScore + "]";
}
}

}
@@ -22,15 +22,13 @@ package ca.uhn.fhir.jpa.dao;
import java.util.List;

import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;

public interface IFulltextSearchSvc {

List<Suggestion> suggestKeywords(String theContext, String theSearchParam, String theText, RequestDetails theRequest);

List<ResourcePersistentId> search(String theResourceName, SearchParameterMap theParams);

List<ResourcePersistentId> everything(String theResourceName, SearchParameterMap theParams, RequestDetails theRequest);
@@ -1,29 +0,0 @@
package ca.uhn.fhir.jpa.dao;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import org.springframework.data.jpa.repository.JpaRepository;

public interface IHapiJpaRepository<T> extends JpaRepository<T, Long> {

void deleteByPid(Long theId);

}
@@ -35,10 +35,6 @@ public interface ITermCodeSystemVersionDao extends JpaRepository<TermCodeSystemV
@Query("DELETE FROM TermCodeSystemVersion csv WHERE csv.myCodeSystem = :cs")
void deleteForCodeSystem(@Param("cs") TermCodeSystem theCodeSystem);

@Modifying
@Query("DELETE FROM TermCodeSystemVersion csv WHERE csv.myId = :pid")
void delete(@Param("pid") Long codesystemversion_pid);

@Query("SELECT cs FROM TermCodeSystemVersion cs WHERE cs.myCodeSystemPid = :codesystem_pid")
List<TermCodeSystemVersion> findByCodeSystemPid(@Param("codesystem_pid") Long theCodeSystemPid);
@@ -1,12 +1,11 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.dao.IHapiJpaRepository;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
@@ -33,7 +32,7 @@ import java.util.Optional;
* #L%
*/

public interface ITermConceptDao extends IHapiJpaRepository<TermConcept> {
public interface ITermConceptDao extends JpaRepository<TermConcept, Long> {

@Query("SELECT COUNT(t) FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid")
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);
@@ -50,9 +49,4 @@ public interface ITermConceptDao extends IHapiJpaRepository<TermConcept> {
@Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null")
Page<TermConcept> findResourcesRequiringReindexing(Pageable thePageRequest);

@Override
@Modifying
@Query("DELETE FROM TermConcept t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);

}
@@ -1,10 +1,9 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.dao.IHapiJpaRepository;
import ca.uhn.fhir.jpa.entity.TermConceptDesignation;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
@@ -28,7 +27,7 @@ import org.springframework.data.repository.query.Param;
* #L%
*/

public interface ITermConceptDesignationDao extends IHapiJpaRepository<TermConceptDesignation> {
public interface ITermConceptDesignationDao extends JpaRepository<TermConceptDesignation, Long> {

@Query("SELECT t.myId FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("csv_pid") Long thePid);
@@ -36,9 +35,4 @@ public interface ITermConceptDesignationDao extends IHapiJpaRepository<TermConce
@Query("SELECT COUNT(t) FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid")
Integer countByCodeSystemVersion(@Param("csv_pid") Long thePid);

@Override
@Modifying
@Query("DELETE FROM TermConceptDesignation t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);

}
@@ -1,10 +1,9 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.dao.IHapiJpaRepository;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
@@ -30,7 +29,7 @@ import java.util.Collection;
* #L%
*/

public interface ITermConceptParentChildLinkDao extends IHapiJpaRepository<TermConceptParentChildLink> {
public interface ITermConceptParentChildLinkDao extends JpaRepository<TermConceptParentChildLink, Long> {

@Query("SELECT COUNT(t) FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);
@@ -41,13 +40,4 @@ public interface ITermConceptParentChildLinkDao extends IHapiJpaRepository<TermC
@Query("SELECT t.myPid FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid")
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);

@Modifying
@Query("DELETE FROM TermConceptParentChildLink t WHERE t.myChildPid = :pid OR t.myParentPid = :pid")
void deleteByConceptPid(@Param("pid") Long theId);

@Override
@Modifying
@Query("DELETE FROM TermConceptParentChildLink t WHERE t.myPid = :pid")
void deleteByPid(@Param("pid") Long theId);

}
@@ -1,10 +1,9 @@
package ca.uhn.fhir.jpa.dao.data;

import ca.uhn.fhir.jpa.dao.IHapiJpaRepository;
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
@@ -28,7 +27,7 @@ import org.springframework.data.repository.query.Param;
* #L%
*/

public interface ITermConceptPropertyDao extends IHapiJpaRepository<TermConceptProperty> {
public interface ITermConceptPropertyDao extends JpaRepository<TermConceptProperty, Long> {

@Query("SELECT t.myId FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
Slice<Long> findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid);
@@ -36,9 +35,4 @@ public interface ITermConceptPropertyDao extends IHapiJpaRepository<TermConceptP
@Query("SELECT COUNT(t) FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid")
Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid);

@Override
@Modifying
@Query("DELETE FROM TermConceptProperty t WHERE t.myId = :pid")
void deleteByPid(@Param("pid") Long theId);

}
@@ -24,6 +24,7 @@ import ca.uhn.fhir.interceptor.api.HookParams;
import ca.uhn.fhir.interceptor.api.IInterceptorBroadcaster;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.entity.PartitionEntity;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.SearchInclude;
import ca.uhn.fhir.jpa.entity.SearchResult;
import ca.uhn.fhir.jpa.entity.SubscriptionTable;
@@ -162,7 +163,7 @@ public class ExpungeEverythingService {
counter.addAndGet(expungeEverythingByType(ResourceTable.class));
counter.addAndGet(expungeEverythingByType(PartitionEntity.class));
myTxTemplate.execute(t -> {
counter.addAndGet(doExpungeEverythingQuery("DELETE from " + org.hibernate.search.jpa.Search.class.getSimpleName() + " d"));
counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d"));
return null;
});
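One subtlety worth noting: `org.hibernate.search.jpa.Search` and the JPA entity `ca.uhn.fhir.jpa.entity.Search` share the simple name `Search`, so the old line happened to generate the intended JPQL anyway; what forced this change is that the Hibernate Search class itself was removed in version 6. A tiny illustration, keeping only the class that still exists:

```java
public class SimpleNameCollision {
   public static void main(String[] args) {
      // Both classes had the simple name "Search", so either produced the
      // JPQL "DELETE from Search d"; org.hibernate.search.jpa.Search is the
      // one that no longer exists in Hibernate Search 6.
      System.out.println(ca.uhn.fhir.jpa.entity.Search.class.getSimpleName()); // Search
   }
}
```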
@@ -24,15 +24,14 @@ import ca.uhn.fhir.context.RuntimeSearchParam;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords;
import ca.uhn.fhir.jpa.model.entity.ResourceLink;
import ca.uhn.fhir.jpa.util.CoordCalculator;
import ca.uhn.fhir.jpa.util.SearchBox;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.dstu2.resource.Location;
import ca.uhn.fhir.rest.param.QuantityParam;
import ca.uhn.fhir.rest.param.SpecialParam;
import ca.uhn.fhir.rest.param.TokenParam;
import com.google.common.annotations.VisibleForTesting;
import org.hibernate.search.engine.spatial.GeoBoundingBox;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Scope;
@@ -116,7 +115,7 @@ public class PredicateBuilderCoords extends BasePredicateBuilder implements IPre
double latitudeDegrees = Double.parseDouble(latitudeValue);
double longitudeDegrees = Double.parseDouble(longitudeValue);

SearchBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm);
GeoBoundingBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm);
latitudePredicate = latitudePredicateFromBox(theBuilder, theFrom, box);
longitudePredicate = longitudePredicateFromBox(theBuilder, theFrom, box);
}
@@ -124,24 +123,24 @@ public class PredicateBuilderCoords extends BasePredicateBuilder implements IPre
|
|||
return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theSearchParam.getName(), theFrom, singleCode, theRequestPartitionId);
|
||||
}
|
||||
|
||||
private Predicate latitudePredicateFromBox(CriteriaBuilder theBuilder, From<?, ResourceIndexedSearchParamCoords> theFrom, SearchBox theBox) {
|
||||
private Predicate latitudePredicateFromBox(CriteriaBuilder theBuilder, From<?, ResourceIndexedSearchParamCoords> theFrom, GeoBoundingBox theBox) {
|
||||
return theBuilder.and(
|
||||
theBuilder.greaterThanOrEqualTo(theFrom.get("myLatitude"), theBox.getSouthWest().getLatitude()),
|
||||
theBuilder.lessThanOrEqualTo(theFrom.get("myLatitude"), theBox.getNorthEast().getLatitude())
|
||||
theBuilder.greaterThanOrEqualTo(theFrom.get("myLatitude"), theBox.bottomRight().latitude()),
|
||||
theBuilder.lessThanOrEqualTo(theFrom.get("myLatitude"), theBox.topLeft().latitude())
|
||||
);
|
||||
}
|
||||
|
||||
@VisibleForTesting
|
||||
Predicate longitudePredicateFromBox(CriteriaBuilder theBuilder, From<?, ResourceIndexedSearchParamCoords> theFrom, SearchBox theBox) {
|
||||
if (theBox.crossesAntiMeridian()) {
|
||||
Predicate longitudePredicateFromBox(CriteriaBuilder theBuilder, From<?, ResourceIndexedSearchParamCoords> theFrom, GeoBoundingBox theBox) {
|
||||
if (theBox.bottomRight().longitude() < theBox.topLeft().longitude()) {
|
||||
return theBuilder.or(
|
||||
theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.getNorthEast().getLongitude()),
|
||||
theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.getSouthWest().getLongitude())
|
||||
theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.bottomRight().longitude()),
|
||||
theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.topLeft().longitude())
|
||||
);
|
||||
}
|
||||
return theBuilder.and(
|
||||
theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.getSouthWest().getLongitude()),
|
||||
theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.getNorthEast().getLongitude())
|
||||
theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.topLeft().longitude()),
|
||||
theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.bottomRight().longitude())
|
||||
);
|
||||
}
|
||||
|
||||
|
|
|
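Hibernate Search's GeoBoundingBox, which replaces SearchBox above, has no crossesAntiMeridian() helper, so both predicate builders in this commit infer the wrap-around case from the corner longitudes: the east edge (bottomRight) lies west of the west edge (topLeft) only when the box straddles the 180° meridian. A minimal sketch of that inference (class name and coordinates are illustrative, not part of the commit):

    import org.hibernate.search.engine.spatial.GeoBoundingBox;
    import org.hibernate.search.engine.spatial.GeoPoint;

    class AntiMeridianSketch {
        public static void main(String[] args) {
            // Box from (lat 1, lon 170) down to (lat -1, lon -170): it wraps across 180°.
            GeoBoundingBox box = GeoBoundingBox.of(GeoPoint.of(1.0, 170.0), GeoPoint.of(-1.0, -170.0));
            // Same test the predicate builders use in place of SearchBox.crossesAntiMeridian():
            boolean crossesAntiMeridian = box.bottomRight().longitude() < box.topLeft().longitude();
            System.out.println(crossesAntiMeridian); // prints "true"
        }
    }
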
@@ -285,7 +285,12 @@ public class MdmLink {
		return myEidMatch;
	}

	public boolean isEidMatch() {
	/**
	 * Note that this method can not be called <code>getEidMatch</code> or
	 * <code>isEidMatch</code> because Hibernate Search complains about having
	 * 2 accessors for this property
	 */
	public boolean isEidMatchPresent() {
		return myEidMatch != null && myEidMatch;
	}

@@ -40,6 +40,7 @@ import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import java.io.Serializable;
import java.util.ArrayList;

@@ -57,6 +58,7 @@ public class TermCodeSystemVersion implements Serializable {
	public static final String IDX_CODESYSTEM_AND_VER = "IDX_CODESYSTEM_AND_VER";
	public static final int MAX_VERSION_LENGTH = 200;
	private static final long serialVersionUID = 1L;

	@OneToMany(fetch = FetchType.LAZY, mappedBy = "myCodeSystem")
	private Collection<TermConcept> myConcepts;

@@ -73,7 +75,7 @@ public class TermCodeSystemVersion implements Serializable {
	@Column(name = "RES_ID", nullable = false, insertable = false, updatable = false)
	private Long myResourcePid;

	@Column(name = "CS_VERSION_ID", nullable = true, updatable = false, length = MAX_VERSION_LENGTH)
	@Column(name = "CS_VERSION_ID", nullable = true, updatable = true, length = MAX_VERSION_LENGTH)
	private String myCodeSystemVersionId;

	/**

@@ -91,7 +93,7 @@ public class TermCodeSystemVersion implements Serializable {
	@OneToOne(mappedBy = "myCurrentVersion", optional = true, fetch = FetchType.LAZY)
	private TermCodeSystem myCodeSystemHavingThisVersionAsCurrentVersionIfAny;

	@Column(name = "CS_DISPLAY", nullable = true, updatable = false, length = MAX_VERSION_LENGTH)
	@Column(name = "CS_DISPLAY", nullable = true, updatable = true, length = MAX_VERSION_LENGTH)
	private String myCodeSystemDisplayName;

	/**

@@ -196,6 +198,7 @@ public class TermCodeSystemVersion implements Serializable {
	public String toString() {
		ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
		b.append("pid", myId);
		b.append("displayName", myCodeSystemDisplayName);
		b.append("codeSystemResourcePid", myResourcePid);
		b.append("codeSystemPid", myCodeSystemPid);
		b.append("codeSystemVersionId", myCodeSystemVersionId);

@@ -22,14 +22,21 @@ package ca.uhn.fhir.jpa.entity;

import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.search.DeferConceptIndexingInterceptor;
import ca.uhn.fhir.jpa.search.DeferConceptIndexingRoutingBinder;
import ca.uhn.fhir.util.ValidateUtil;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.hibernate.search.annotations.*;
import org.hibernate.search.engine.backend.types.Projectable;
import org.hibernate.search.engine.backend.types.Searchable;
import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.PropertyBinderRef;
import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderRef;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding;
import org.hl7.fhir.r4.model.Coding;

import javax.annotation.Nonnull;

@@ -43,7 +50,7 @@ import static org.apache.commons.lang3.StringUtils.left;
import static org.apache.commons.lang3.StringUtils.length;

@Entity
@Indexed(interceptor = DeferConceptIndexingInterceptor.class)
@Indexed(routingBinder = @RoutingBinderRef(type = DeferConceptIndexingRoutingBinder.class))
@Table(name = "TRM_CONCEPT", uniqueConstraints = {
	@UniqueConstraint(name = "IDX_CONCEPT_CS_CODE", columnNames = {"CODESYSTEM_PID", "CODEVAL"})
}, indexes = {

@@ -55,49 +62,59 @@ public class TermConcept implements Serializable {
	public static final int MAX_DESC_LENGTH = 400;
	private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class);
	private static final long serialVersionUID = 1L;

	@OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {})
	private List<TermConceptParentChildLink> myChildren;

	@Column(name = "CODEVAL", nullable = false, length = MAX_CODE_LENGTH)
	@Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),})
	@FullTextField(name = "myCode", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "exactAnalyzer")
	private String myCode;

	@Temporal(TemporalType.TIMESTAMP)
	@Column(name = "CONCEPT_UPDATED", nullable = true)
	private Date myUpdated;

	@ManyToOne(fetch = FetchType.LAZY)
	@JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID"))
	private TermCodeSystemVersion myCodeSystem;

	@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false)
	@Fields({@Field(name = "myCodeSystemVersionPid")})
	@GenericField(name = "myCodeSystemVersionPid")
	private long myCodeSystemVersionPid;

	@Column(name = "DISPLAY", nullable = true, length = MAX_DESC_LENGTH)
	@Fields({
		@Field(name = "myDisplay", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")),
		@Field(name = "myDisplayEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")),
		@Field(name = "myDisplayWordEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteWordEdgeAnalyzer")),
		@Field(name = "myDisplayNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")),
		@Field(name = "myDisplayPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer"))
	})
	@FullTextField(name = "myDisplay", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer")
	@FullTextField(name = "myDisplayEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer")
	@FullTextField(name = "myDisplayWordEdgeNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteWordEdgeAnalyzer")
	@FullTextField(name = "myDisplayNGram", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer")
	@FullTextField(name = "myDisplayPhonetic", searchable = Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer")
	private String myDisplay;

	@OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY)
	@Field(name = "PROPmyProperties", analyzer = @Analyzer(definition = "termConceptPropertyAnalyzer"))
	@FieldBridge(impl = TermConceptPropertyFieldBridge.class)
	@PropertyBinding(binder = @PropertyBinderRef(type = TermConceptPropertyBinder.class))
	private Collection<TermConceptProperty> myProperties;

	@OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY)
	private Collection<TermConceptDesignation> myDesignations;
	@Id()

	@Id
	@SequenceGenerator(name = "SEQ_CONCEPT_PID", sequenceName = "SEQ_CONCEPT_PID")
	@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PID")
	@Column(name = "PID")
	@GenericField
	private Long myId;

	@Column(name = "INDEX_STATUS", nullable = true)
	private Long myIndexStatus;
	@Field(name = "myParentPids", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "conceptParentPidsAnalyzer"))

	@Lob
	@Column(name = "PARENT_PIDS", nullable = true)
	@FullTextField(name = "myParentPids", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "conceptParentPidsAnalyzer")
	private String myParentPids;

	@OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myChild")
	private List<TermConceptParentChildLink> myParents;

	@Column(name = "CODE_SEQUENCE", nullable = true)
	private Integer mySequence;

@@ -382,6 +399,8 @@ public class TermConcept implements Serializable {
	@Override
	public String toString() {
		ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE);
		b.append("pid", myId);
		b.append("csvPid", myCodeSystemVersionPid);
		b.append("code", myCode);
		b.append("display", myDisplay);
		if (mySequence != null) {

@@ -131,4 +131,7 @@ public class TermConceptDesignation implements Serializable {
	}

	public Long getPid() {
		return myId;
	}
}

@@ -20,8 +20,7 @@ package ca.uhn.fhir.jpa.entity;
 * #L%
 */

import org.hibernate.search.annotations.Field;
import org.hibernate.search.annotations.Fields;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField;

import javax.persistence.*;
import java.io.Serializable;

@@ -44,7 +43,7 @@ public class TermConceptParentChildLink implements Serializable {
	private TermCodeSystemVersion myCodeSystem;

	@Column(name = "CODESYSTEM_PID", insertable = false, updatable = false, nullable = false)
	@Fields({@Field(name = "myCodeSystemVersionPid")})
	@FullTextField(name = "myCodeSystemVersionPid")
	private long myCodeSystemVersionPid;

	@ManyToOne(fetch = FetchType.LAZY, cascade = {})

@@ -251,4 +251,8 @@ public class TermConceptProperty implements Serializable {
			.append(myDisplay)
			.toHashCode();
	}

	public Long getPid() {
		return myId;
	}
}

@@ -0,0 +1,77 @@
package ca.uhn.fhir.jpa.entity;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.hibernate.search.engine.backend.document.DocumentElement;
import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement;
import org.hibernate.search.mapper.pojo.bridge.PropertyBridge;
import org.hibernate.search.mapper.pojo.bridge.binding.PropertyBindingContext;
import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.PropertyBinder;
import org.hibernate.search.mapper.pojo.bridge.runtime.PropertyBridgeWriteContext;

import java.util.Collection;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Allows hibernate search to index individual concepts' properties
 */
public class TermConceptPropertyBinder implements PropertyBinder {

	public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP";

	@Override
	public void bind(PropertyBindingContext thePropertyBindingContext) {
		thePropertyBindingContext.dependencies().use("myKey").use("myValue");
		IndexSchemaElement indexSchemaElement = thePropertyBindingContext.indexSchemaElement();

		//In order to support dynamic fields, we have to use field templates. We _must_ define the template at bootstrap time and cannot
		//create them adhoc. https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#mapper-orm-bridge-index-field-dsl-dynamic
		//I _think_ im doing the right thing here by indicating that everything matching this template uses this analyzer.
		indexSchemaElement.fieldTemplate("propTemplate", f -> f.asString().analyzer("termConceptPropertyAnalyzer"))
			.matchingPathGlob(CONCEPT_FIELD_PROPERTY_PREFIX + "*")
			.multiValued();

		thePropertyBindingContext.bridge(new TermConceptPropertyBridge());
	}

	private class TermConceptPropertyBridge implements PropertyBridge {

		@Override
		public void write(DocumentElement theDocument, Object theObject, PropertyBridgeWriteContext thePropertyBridgeWriteContext) {

			Collection<TermConceptProperty> properties = (Collection<TermConceptProperty>) theObject;

			if (properties != null) {
				for (TermConceptProperty next : properties) {
					theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue());
					System.out.println("Adding Prop: " + CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey() + " -- " + next.getValue());
					if (next.getType() == TermConceptPropertyTypeEnum.CODING && isNotBlank(next.getDisplay())) {
						theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay());
						System.out.println("Adding multivalue Prop: " + CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey() + " -- " + next.getDisplay());
					}
				}
			}
		}
	}
}

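The field template declared in bind() is what makes ad-hoc PROP* fields legal at write time; at query time a dynamic field is addressed by name like any static field. A hedged sketch of such a query using the standard Hibernate Search 6 ORM mapper API (the entityManager variable and the "PROPparent" field name are illustrative, not part of this commit):

    import org.hibernate.search.mapper.orm.Search;
    import org.hibernate.search.mapper.orm.session.SearchSession;

    import javax.persistence.EntityManager;
    import java.util.List;

    class DynamicPropertyQuerySketch {
        List<TermConcept> findByProperty(EntityManager entityManager) {
            SearchSession session = Search.session(entityManager);
            // "PROPparent" is a dynamic field produced by the "propTemplate" field template above.
            return session.search(TermConcept.class)
                .where(f -> f.match().field("PROPparent").matching("LP12345-6"))
                .fetchHits(20);
        }
    }
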
@@ -1,70 +0,0 @@
package ca.uhn.fhir.jpa.entity;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.hibernate.search.bridge.FieldBridge;
import org.hibernate.search.bridge.LuceneOptions;
import org.hibernate.search.bridge.StringBridge;

import java.util.Collection;

import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**
 * Allows hibernate search to index individual concepts' properties
 */
public class TermConceptPropertyFieldBridge implements FieldBridge, StringBridge {

	public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP";

	/**
	 * Constructor
	 */
	public TermConceptPropertyFieldBridge() {
		super();
	}

	@Override
	public String objectToString(Object theObject) {
		return theObject.toString();
	}

	@Override
	public void set(String theName, Object theValue, Document theDocument, LuceneOptions theLuceneOptions) {
		@SuppressWarnings("unchecked")
		Collection<TermConceptProperty> properties = (Collection<TermConceptProperty>) theValue;

		if (properties != null) {
			for (TermConceptProperty next : properties) {
				theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue(), Field.Store.YES));

				if (next.getType() == TermConceptPropertyTypeEnum.CODING) {
					if (isNotBlank(next.getDisplay())) {
						theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay(), Field.Store.YES));
					}
				}
			}
		}
	}
}

@@ -36,11 +36,11 @@ import static org.apache.commons.lang3.StringUtils.left;
import static org.apache.commons.lang3.StringUtils.length;

/*
 * DM 2019-08-01 - Do not use IDX_VALUESET_CONCEPT_CS_CD; this was previously used as an index so reusing the name will
 * DM 2019-08-01 - Do not use IDX_VALUESET_CONCEPT_CS_CD or IDX_VALUESET_CONCEPT_CS_CODE; this was previously used as an index so reusing the name will
 * bork up migration tasks.
 */
@Table(name = "TRM_VALUESET_CONCEPT", uniqueConstraints = {
	@UniqueConstraint(name = "IDX_VS_CONCEPT_CS_CODE", columnNames = {"VALUESET_PID", "SYSTEM_URL", "SYSTEM_VER", "CODEVAL"}),
	@UniqueConstraint(name = "IDX_VS_CONCEPT_CSCD", columnNames = {"VALUESET_PID", "SYSTEM_URL", "CODEVAL"}),
	@UniqueConstraint(name = "IDX_VS_CONCEPT_ORDER", columnNames = {"VALUESET_PID", "VALUESET_ORDER"})
})
@Entity()

@@ -46,4 +46,5 @@ abstract class BaseTypedRule implements IRepositoryValidatingRule {
	protected FhirContext getFhirContext() {
		return myFhirContext;
	}

}

@@ -93,7 +93,7 @@ public class RepositoryValidatingInterceptor {
	/**
	 * Interceptor hook method. This method should not be called directly.
	 */
	@Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_CREATED)
	@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_CREATED)
	void create(IBaseResource theResource) {
		handle(theResource);
	}

@@ -101,7 +101,7 @@ public class RepositoryValidatingInterceptor {
	/**
	 * Interceptor hook method. This method should not be called directly.
	 */
	@Hook(Pointcut.STORAGE_PRECOMMIT_RESOURCE_UPDATED)
	@Hook(Pointcut.STORAGE_PRESTORAGE_RESOURCE_UPDATED)
	void update(IBaseResource theOldResource, IBaseResource theNewResource) {
		handle(theNewResource);
	}

@@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.interceptor.validation;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.validation.ValidatorResourceFetcher;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import org.apache.commons.lang3.Validate;
import org.apache.commons.text.WordUtils;
import org.hl7.fhir.r5.utils.IResourceValidator;

@@ -34,6 +35,7 @@ import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import static com.google.common.base.Ascii.toLowerCase;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

/**

@@ -204,6 +206,77 @@ public final class RepositoryValidatingRuleBuilder implements IRuleRoot {
			myRule.setBestPracticeWarningLevel(bestPracticeWarningLevel);
			return this;
		}

		/**
		 * Specifies that the resource should not be rejected from storage even if it does not pass validation.
		 */
		@Nonnull
		public FinalizedRequireValidationRule dontReject() {
			myRule.dontReject();
			return this;
		}

		/**
		 * Specifies the minimum validation result severity that should cause a rejection. For example, if
		 * this is set to <code>ERROR</code> (which is the default), any validation results with a severity
		 * of <code>ERROR</code> or <code>FATAL</code> will cause the create/update operation to be rejected and
		 * rolled back, and no data will be saved.
		 * <p>
		 * Valid values must be drawn from {@link ResultSeverityEnum}
		 * </p>
		 */
		@Nonnull
		public FinalizedRequireValidationRule rejectOnSeverity(@Nonnull String theSeverity) {
			ResultSeverityEnum severity = ResultSeverityEnum.fromCode(toLowerCase(theSeverity));
			Validate.notNull(severity, "Invalid severity code: %s", theSeverity);
			return rejectOnSeverity(severity);
		}

		/**
		 * Specifies the minimum validation result severity that should cause a rejection. For example, if
		 * this is set to <code>ERROR</code> (which is the default), any validation results with a severity
		 * of <code>ERROR</code> or <code>FATAL</code> will cause the create/update operation to be rejected and
		 * rolled back, and no data will be saved.
		 * <p>
		 * Valid values must be drawn from {@link ResultSeverityEnum}
		 * </p>
		 */
		@Nonnull
		public FinalizedRequireValidationRule rejectOnSeverity(@Nonnull ResultSeverityEnum theSeverity) {
			myRule.rejectOnSeverity(theSeverity);
			return this;
		}

		/**
		 * Specifies that if the validation results in any results with a severity of <code>theSeverity</code> or
		 * greater, the resource will be tagged with the given tag when it is saved.
		 *
		 * @param theSeverity  The minimum severity. Must be drawn from values in {@link ResultSeverityEnum} and must not be <code>null</code>
		 * @param theTagSystem The system for the tag to add. Must not be <code>null</code>
		 * @param theTagCode   The code for the tag to add. Must not be <code>null</code>
		 * @return
		 */
		@Nonnull
		public FinalizedRequireValidationRule tagOnSeverity(@Nonnull String theSeverity, @Nonnull String theTagSystem, @Nonnull String theTagCode) {
			ResultSeverityEnum severity = ResultSeverityEnum.fromCode(toLowerCase(theSeverity));
			return tagOnSeverity(severity, theTagSystem, theTagCode);
		}

		/**
		 * Specifies that if the validation results in any results with a severity of <code>theSeverity</code> or
		 * greater, the resource will be tagged with the given tag when it is saved.
		 *
		 * @param theSeverity  The minimum severity. Must be drawn from values in {@link ResultSeverityEnum} and must not be <code>null</code>
		 * @param theTagSystem The system for the tag to add. Must not be <code>null</code>
		 * @param theTagCode   The code for the tag to add. Must not be <code>null</code>
		 * @return
		 */
		@Nonnull
		public FinalizedRequireValidationRule tagOnSeverity(@Nonnull ResultSeverityEnum theSeverity, @Nonnull String theTagSystem, @Nonnull String theTagCode) {
			myRule.tagOnSeverity(theSeverity, theTagSystem, theTagCode);
			return this;
		}

	}

}

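Taken together, the new fluent methods let a deployment choose between hard rejection and soft tagging per severity. A sketch of how the finished builder might be used, assuming the standard HAPI wiring where the builder is a Spring prototype bean and requireValidationToDeclaredProfiles() comes from elsewhere in this builder (bean lookup, variable names, and the tag URI are illustrative):

    RepositoryValidatingRuleBuilder ruleBuilder = theAppCtx.getBean(RepositoryValidatingRuleBuilder.class);
    ruleBuilder
        .forResourcesOfType("Patient")
        .requireValidationToDeclaredProfiles()
        .rejectOnSeverity(ResultSeverityEnum.ERROR)   // the default, shown explicitly
        .tagOnSeverity(ResultSeverityEnum.WARNING, "http://example.com/tags", "validation-warning");
    List<IRepositoryValidatingRule> rules = ruleBuilder.build();
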
@@ -27,18 +27,24 @@ import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import ca.uhn.fhir.validation.SingleValidationMessage;
import ca.uhn.fhir.validation.ValidationResult;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.utils.IResourceValidator;

import javax.annotation.Nonnull;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

class RequireValidationRule extends BaseTypedRule {
	private final IValidationSupport myValidationSupport;
	private final ValidatorResourceFetcher myValidatorResourceFetcher;
	private final FhirInstanceValidator myValidator;
	private ResultSeverityEnum myRejectOnSeverity = ResultSeverityEnum.ERROR;
	private List<TagOnSeverity> myTagOnSeverity = Collections.emptyList();

	RequireValidationRule(FhirContext theFhirContext, String theType, IValidationSupport theValidationSupport, ValidatorResourceFetcher theValidatorResourceFetcher) {
	public RequireValidationRule(FhirContext theFhirContext, String theType, IValidationSupport theValidationSupport, ValidatorResourceFetcher theValidatorResourceFetcher) {
		super(theFhirContext, theType);
		myValidationSupport = theValidationSupport;
		myValidatorResourceFetcher = theValidatorResourceFetcher;

@@ -62,10 +68,67 @@ class RequireValidationRule extends BaseTypedRule {

		for (SingleValidationMessage next : outcome.getMessages()) {
			if (next.getSeverity().ordinal() >= ResultSeverityEnum.ERROR.ordinal()) {
				return RuleEvaluation.forFailure(this, outcome.toOperationOutcome());
			if (myRejectOnSeverity != null && myRejectOnSeverity.ordinal() <= next.getSeverity().ordinal()) {
				return RuleEvaluation.forFailure(this, outcome.toOperationOutcome());
			}
			}

			for (TagOnSeverity nextTagOnSeverity : myTagOnSeverity) {
				if (next.getSeverity().ordinal() >= nextTagOnSeverity.getSeverity()) {
					theResource
						.getMeta()
						.addTag()
						.setSystem(nextTagOnSeverity.getTagSystem())
						.setCode(nextTagOnSeverity.getTagCode());
				}
			}

		}

		return RuleEvaluation.forSuccess(this);
	}

	public void rejectOnSeverity(ResultSeverityEnum theSeverity) {
		myRejectOnSeverity = theSeverity;
	}

	public void tagOnSeverity(ResultSeverityEnum theSeverity, String theTagSystem, String theTagCode) {
		Validate.notNull(theSeverity, "theSeverity must not be null");
		Validate.notEmpty(theTagSystem, "theTagSystem must not be null or empty");
		Validate.notEmpty(theTagCode, "theTagCode must not be null or empty");
		if (myTagOnSeverity.isEmpty()) {
			myTagOnSeverity = new ArrayList<>();
		}
		myTagOnSeverity.add(new TagOnSeverity(theSeverity.ordinal(), theTagSystem, theTagCode));
	}

	public void dontReject() {
		myRejectOnSeverity = null;
	}

	private static class TagOnSeverity {
		private final int mySeverity;
		private final String myTagSystem;
		private final String myTagCode;

		private TagOnSeverity(int theSeverity, String theTagSystem, String theTagCode) {
			mySeverity = theSeverity;
			myTagSystem = theTagSystem;
			myTagCode = theTagCode;
		}

		public int getSeverity() {
			return mySeverity;
		}

		public String getTagSystem() {
			return myTagSystem;
		}

		public String getTagCode() {
			return myTagCode;
		}
	}

}

|
|||
package ca.uhn.fhir.jpa.provider;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
|
||||
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3;
|
||||
import ca.uhn.fhir.model.api.annotation.Description;
|
||||
import ca.uhn.fhir.model.dstu2.composite.MetaDt;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Bundle;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Parameters;
|
||||
import ca.uhn.fhir.model.dstu2.resource.Parameters.Parameter;
|
||||
import ca.uhn.fhir.model.primitive.BooleanDt;
|
||||
import ca.uhn.fhir.model.primitive.DecimalDt;
|
||||
import ca.uhn.fhir.model.primitive.IntegerDt;
|
||||
import ca.uhn.fhir.model.primitive.StringDt;
|
||||
import ca.uhn.fhir.rest.annotation.IdParam;
|
||||
|
@ -20,7 +17,6 @@ import ca.uhn.fhir.rest.annotation.OperationParam;
|
|||
import ca.uhn.fhir.rest.annotation.Transaction;
|
||||
import ca.uhn.fhir.rest.annotation.TransactionParam;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBundle;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
|
@ -216,36 +212,6 @@ public class JpaSystemProviderDstu2 extends BaseJpaSystemProviderDstu2Plus<Bundl
|
|||
return parameters;
|
||||
}
|
||||
|
||||
@Operation(name = JpaConstants.OPERATION_SUGGEST_KEYWORDS, idempotent = true)
|
||||
public Parameters suggestKeywords(
|
||||
@OperationParam(name = "context", min = 1, max = 1) String theContext,
|
||||
@OperationParam(name = "searchParam", min = 1, max = 1) String theSearchParam,
|
||||
@OperationParam(name = "text", min = 1, max = 1) String theText,
|
||||
RequestDetails theRequest) {
|
||||
JpaSystemProviderDstu3.validateFulltextSearchEnabled(mySearchDao);
|
||||
|
||||
if (isBlank(theContext)) {
|
||||
throw new InvalidRequestException("Parameter 'context' must be provided");
|
||||
}
|
||||
if (isBlank(theSearchParam)) {
|
||||
throw new InvalidRequestException("Parameter 'searchParam' must be provided");
|
||||
}
|
||||
if (isBlank(theText)) {
|
||||
throw new InvalidRequestException("Parameter 'text' must be provided");
|
||||
}
|
||||
|
||||
List<Suggestion> keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest);
|
||||
|
||||
Parameters retVal = new Parameters();
|
||||
for (Suggestion next : keywords) {
|
||||
retVal.addParameter()
|
||||
.addPart(new Parameter().setName("keyword").setValue(new StringDt(next.getTerm())))
|
||||
.addPart(new Parameter().setName("score").setValue(new DecimalDt(next.getScore())));
|
||||
}
|
||||
|
||||
return retVal;
|
||||
}
|
||||
|
||||
@Transaction
|
||||
public Bundle transaction(RequestDetails theRequestDetails, @TransactionParam Bundle theResources) {
|
||||
startRequest(((ServletRequestDetails) theRequestDetails).getServletRequest());
|
||||
|
|
|
@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.provider.dstu3;

import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus;

@@ -224,36 +223,6 @@ public class JpaSystemProviderDstu3 extends BaseJpaSystemProviderDstu2Plus<Bundl
		return parameters;
	}

	@Operation(name = JpaConstants.OPERATION_SUGGEST_KEYWORDS, idempotent = true)
	public Parameters suggestKeywords(
		@OperationParam(name = "context", min = 1, max = 1) String theContext,
		@OperationParam(name = "searchParam", min = 1, max = 1) String theSearchParam,
		@OperationParam(name = "text", min = 1, max = 1) String theText,
		RequestDetails theRequest) {

		if (isBlank(theContext)) {
			throw new InvalidRequestException("Parameter 'context' must be provided");
		}
		if (isBlank(theSearchParam)) {
			throw new InvalidRequestException("Parameter 'searchParam' must be provided");
		}
		if (isBlank(theText)) {
			throw new InvalidRequestException("Parameter 'text' must be provided");
		}

		List<Suggestion> keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest);

		Parameters retVal = new Parameters();
		for (Suggestion next : keywords) {
			retVal.addParameter()
				.addPart(new ParametersParameterComponent().setName("keyword").setValue(new StringType(next.getTerm())))
				.addPart(new ParametersParameterComponent().setName("score").setValue(new DecimalType(next.getScore())));
		}

		return retVal;
	}

	@Transaction
	public Bundle transaction(RequestDetails theRequestDetails, @TransactionParam Bundle theResources) {
		startRequest(((ServletRequestDetails) theRequestDetails).getServletRequest());

@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.provider.r4;

import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus;

@@ -211,38 +210,6 @@ public class JpaSystemProviderR4 extends BaseJpaSystemProviderDstu2Plus<Bundle,
		return parameters;
	}

	@Operation(name = JpaConstants.OPERATION_SUGGEST_KEYWORDS, idempotent = true)
	public Parameters suggestKeywords(
		@OperationParam(name = "context", min = 1, max = 1) String theContext,
		@OperationParam(name = "searchParam", min = 1, max = 1) String theSearchParam,
		@OperationParam(name = "text", min = 1, max = 1) String theText,
		RequestDetails theRequest) {
		ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3.validateFulltextSearchEnabled(mySearchDao);

		if (isBlank(theContext)) {
			throw new InvalidRequestException("Parameter 'context' must be provided");
		}
		if (isBlank(theSearchParam)) {
			throw new InvalidRequestException("Parameter 'searchParam' must be provided");
		}
		if (isBlank(theText)) {
			throw new InvalidRequestException("Parameter 'text' must be provided");
		}

		List<Suggestion> keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest);

		Parameters retVal = new Parameters();
		for (Suggestion next : keywords) {
			//@formatter:off
			retVal.addParameter()
				.addPart(new ParametersParameterComponent().setName("keyword").setValue(new StringType(next.getTerm())))
				.addPart(new ParametersParameterComponent().setName("score").setValue(new DecimalType(next.getScore())));
			//@formatter:on
		}

		return retVal;
	}

	/**
	 * /$process-message
	 */

@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.provider.r5;

import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus;

@@ -213,38 +212,6 @@ public class JpaSystemProviderR5 extends BaseJpaSystemProviderDstu2Plus<Bundle,
		return parameters;
	}

	@Operation(name = JpaConstants.OPERATION_SUGGEST_KEYWORDS, idempotent = true)
	public Parameters suggestKeywords(
		@OperationParam(name = "context", min = 1, max = 1) String theContext,
		@OperationParam(name = "searchParam", min = 1, max = 1) String theSearchParam,
		@OperationParam(name = "text", min = 1, max = 1) String theText,
		RequestDetails theRequest) {
		ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3.validateFulltextSearchEnabled(mySearchDao);

		if (isBlank(theContext)) {
			throw new InvalidRequestException("Parameter 'context' must be provided");
		}
		if (isBlank(theSearchParam)) {
			throw new InvalidRequestException("Parameter 'searchParam' must be provided");
		}
		if (isBlank(theText)) {
			throw new InvalidRequestException("Parameter 'text' must be provided");
		}

		List<Suggestion> keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest);

		Parameters retVal = new Parameters();
		for (Suggestion next : keywords) {
			//@formatter:off
			retVal.addParameter()
				.addPart(new ParametersParameterComponent().setName("keyword").setValue(new StringType(next.getTerm())))
				.addPart(new ParametersParameterComponent().setName("score").setValue(new DecimalType(next.getScore())));
			//@formatter:on
		}

		return retVal;
	}

	/**
	 * /$process-message
	 */

@@ -1,55 +0,0 @@
package ca.uhn.fhir.jpa.search;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.hibernate.search.indexes.interceptor.EntityIndexingInterceptor;
import org.hibernate.search.indexes.interceptor.IndexingOverride;

import ca.uhn.fhir.jpa.entity.TermConcept;

public class DeferConceptIndexingInterceptor implements EntityIndexingInterceptor<TermConcept> {

	@Override
	public IndexingOverride onAdd(TermConcept theEntity) {
		if (theEntity.getIndexStatus() == null) {
			return IndexingOverride.SKIP;
		}

		return IndexingOverride.APPLY_DEFAULT;
	}

	@Override
	public IndexingOverride onCollectionUpdate(TermConcept theEntity) {
		return IndexingOverride.APPLY_DEFAULT;
	}

	@Override
	public IndexingOverride onDelete(TermConcept theEntity) {
		return IndexingOverride.APPLY_DEFAULT;
	}

	@Override
	public IndexingOverride onUpdate(TermConcept theEntity) {
		return onAdd(theEntity);
	}

}

@@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.search;

/*
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import ca.uhn.fhir.jpa.entity.TermConcept;
import org.hibernate.search.mapper.pojo.bridge.RoutingBridge;
import org.hibernate.search.mapper.pojo.bridge.binding.RoutingBindingContext;
import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.RoutingBinder;
import org.hibernate.search.mapper.pojo.bridge.runtime.RoutingBridgeRouteContext;
import org.hibernate.search.mapper.pojo.route.DocumentRoutes;

public class DeferConceptIndexingRoutingBinder implements RoutingBinder {
	@Override
	public void bind(RoutingBindingContext theRoutingBindingContext) {
		theRoutingBindingContext.dependencies().use("myIndexStatus");

		theRoutingBindingContext.bridge(TermConcept.class, new TermConceptBridge());
	}

	private class TermConceptBridge implements RoutingBridge<TermConcept> {
		@Override
		public void route(DocumentRoutes theDocumentRoutes, Object theO, TermConcept theTermConcept, RoutingBridgeRouteContext theRoutingBridgeRouteContext) {
			if (theTermConcept.getIndexStatus() == null) {
				theDocumentRoutes.notIndexed();
			} else {
				theDocumentRoutes.addRoute();
			}
		}

		@Override
		public void previousRoutes(DocumentRoutes theDocumentRoutes, Object theO, TermConcept theTermConcept, RoutingBridgeRouteContext theRoutingBridgeRouteContext) {
			theDocumentRoutes.addRoute();
		}
	}
}

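This routing binder replaces the Hibernate Search 5 EntityIndexingInterceptor deleted just above: concepts whose myIndexStatus is still null are routed nowhere, so indexing stays deferred until the status is set. The decision reduces to a one-line predicate (a conceptual sketch only, not HAPI API):

    // Conceptual equivalent of route() above: index only once a status has been assigned.
    static boolean shouldIndex(TermConcept theConcept) {
        return theConcept.getIndexStatus() != null;
    }
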
@@ -0,0 +1,91 @@
package ca.uhn.fhir.jpa.search;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.sun.xml.bind.api.impl.NameConverter;
import org.apache.lucene.analysis.core.KeywordTokenizerFactory;
import org.apache.lucene.analysis.core.LowerCaseFilterFactory;
import org.apache.lucene.analysis.core.StopFilterFactory;
import org.apache.lucene.analysis.core.WhitespaceTokenizerFactory;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory;
import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory;
import org.apache.lucene.analysis.ngram.NGramFilterFactory;
import org.apache.lucene.analysis.pattern.PatternTokenizerFactory;
import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
import org.hibernate.search.backend.lucene.analysis.LuceneAnalysisConfigurationContext;
import org.hibernate.search.backend.lucene.analysis.LuceneAnalysisConfigurer;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;

/**
 * Factory for defining the analysers.
 */
@Component
public class HapiLuceneAnalysisConfigurer implements LuceneAnalysisConfigurer {

	@Override
	public void configure(LuceneAnalysisConfigurationContext theLuceneCtx) {
		theLuceneCtx.analyzer("autocompleteEdgeAnalyzer").custom()
			.tokenizer(PatternTokenizerFactory.class).param("pattern", "(.*)").param("group", "1")
			.tokenFilter(LowerCaseFilterFactory.class)
			.tokenFilter(StopFilterFactory.class)
			.tokenFilter(EdgeNGramFilterFactory.class)
			.param("minGramSize", "3")
			.param("maxGramSize", "50");

		theLuceneCtx.analyzer("autocompletePhoneticAnalyzer").custom()
			.tokenizer(StandardTokenizerFactory.class)
			.tokenFilter(StopFilterFactory.class)
			.tokenFilter(PhoneticFilterFactory.class).param("encoder", "DoubleMetaphone")
			.tokenFilter(SnowballPorterFilterFactory.class).param("language", "English");

		theLuceneCtx.analyzer("autocompleteNGramAnalyzer").custom()
			.tokenizer(StandardTokenizerFactory.class)
			.tokenFilter(WordDelimiterFilterFactory.class)
			.tokenFilter(LowerCaseFilterFactory.class)
			.tokenFilter(NGramFilterFactory.class)
			.param("minGramSize", "3")
			.param("maxGramSize", "20");

		theLuceneCtx.analyzer("autocompleteWordEdgeAnalyzer").custom()
			.tokenizer(StandardTokenizerFactory.class)
			.tokenFilter(LowerCaseFilterFactory.class)
			.tokenFilter(StopFilterFactory.class)
			.tokenFilter(EdgeNGramFilterFactory.class)
			.param("minGramSize", "2")
			.param("maxGramSize", "20");

		theLuceneCtx.analyzer("standardAnalyzer").custom()
			.tokenizer(StandardTokenizerFactory.class)
			.tokenFilter(LowerCaseFilterFactory.class);

		theLuceneCtx.analyzer("exactAnalyzer").custom()
			.tokenizer(KeywordTokenizerFactory.class);

		theLuceneCtx.analyzer("conceptParentPidsAnalyzer").custom()
			.tokenizer(WhitespaceTokenizerFactory.class);

		theLuceneCtx.analyzer("termConceptPropertyAnalyzer").custom()
			.tokenizer(WhitespaceTokenizerFactory.class);
	}
}

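For this configurer to take effect it has to be registered against the Lucene backend. A minimal sketch using the Hibernate Search 6 property-key helpers (this assumes the standard BackendSettings/LuceneBackendSettings API; the exact Spring wiring used by the project's configs is not shown in this hunk):

    import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
    import org.hibernate.search.engine.cfg.BackendSettings;

    import java.util.Properties;

    // Inside a JPA properties bean, for example:
    Properties extraProperties = new Properties();
    extraProperties.put(
        BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER),
        HapiLuceneAnalysisConfigurer.class.getName());
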
@@ -1,78 +0,0 @@
package ca.uhn.fhir.jpa.search;

/*-
 * #%L
 * HAPI FHIR JPA Server
 * %%
 * Copyright (C) 2014 - 2021 Smile CDR, Inc.
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import org.apache.lucene.analysis.core.*;
import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory;
import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory;
import org.apache.lucene.analysis.ngram.NGramFilterFactory;
import org.apache.lucene.analysis.pattern.PatternTokenizerFactory;
import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.apache.lucene.analysis.standard.StandardFilterFactory;
import org.apache.lucene.analysis.standard.StandardTokenizerFactory;
import org.hibernate.search.annotations.Factory;
import org.hibernate.search.cfg.SearchMapping;

/**
 * Factory for defining the analysers.
 */
public class LuceneSearchMappingFactory {
	@Factory
	public SearchMapping getSearchMapping() {
		SearchMapping mapping = new SearchMapping();

		mapping.analyzerDef("autocompleteEdgeAnalyzer", PatternTokenizerFactory.class)
			.tokenizerParam("pattern", "(.*)")
			.tokenizerParam("group", "1")
			.filter(LowerCaseFilterFactory.class)
			.filter(StopFilterFactory.class)
			.filter(EdgeNGramFilterFactory.class)
			.param("minGramSize", "3")
			.param("maxGramSize", "50")
			.analyzerDef("autocompleteWordEdgeAnalyzer", StandardTokenizerFactory.class)
			.filter(LowerCaseFilterFactory.class)
			.filter(StopFilterFactory.class)
			.filter(EdgeNGramFilterFactory.class)
			.param("minGramSize", "3")
			.param("maxGramSize", "20")
			.analyzerDef("autocompletePhoneticAnalyzer", StandardTokenizerFactory.class)
			.filter(StandardFilterFactory.class)
			.filter(StopFilterFactory.class)
			.filter(PhoneticFilterFactory.class)
			.param("encoder", "DoubleMetaphone")
			.filter(SnowballPorterFilterFactory.class)
			.param("language", "English")
			.analyzerDef("autocompleteNGramAnalyzer", StandardTokenizerFactory.class)
			.filter(WordDelimiterFilterFactory.class)
			.filter(LowerCaseFilterFactory.class)
			.filter(NGramFilterFactory.class)
			.param("minGramSize", "3")
			.param("maxGramSize", "20")
			.analyzerDef("standardAnalyzer", StandardTokenizerFactory.class)
			.filter(LowerCaseFilterFactory.class)
			.analyzerDef("exactAnalyzer", KeywordTokenizerFactory.class)
			.analyzerDef("conceptParentPidsAnalyzer", WhitespaceTokenizerFactory.class)
			.analyzerDef("termConceptPropertyAnalyzer", WhitespaceTokenizerFactory.class);

		return mapping;
	}
}

@@ -159,7 +159,7 @@ public class QueryStack {

		Condition hashIdentityPredicate = sortPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName);
		mySqlBuilder.addPredicate(hashIdentityPredicate);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnValueLow(), theAscending);
		mySqlBuilder.addSortDate(sortPredicateBuilder.getColumnValueLow(), theAscending);
	}

	public void addSortOnLastUpdated(boolean theAscending) {

@@ -170,7 +170,7 @@ public class QueryStack {
		} else {
			resourceTablePredicateBuilder = mySqlBuilder.addResourceTablePredicateBuilder(firstPredicateBuilder.getResourceIdColumn());
		}
		mySqlBuilder.addSort(resourceTablePredicateBuilder.getColumnLastUpdated(), theAscending);
		mySqlBuilder.addSortDate(resourceTablePredicateBuilder.getColumnLastUpdated(), theAscending);
	}

@@ -180,7 +180,7 @@ public class QueryStack {

		Condition hashIdentityPredicate = sortPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName);
		mySqlBuilder.addPredicate(hashIdentityPredicate);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnValue(), theAscending);
		mySqlBuilder.addSortNumeric(sortPredicateBuilder.getColumnValue(), theAscending);
	}

	public void addSortOnQuantity(String theResourceName, String theParamName, boolean theAscending) {

@@ -189,18 +189,18 @@ public class QueryStack {

		Condition hashIdentityPredicate = sortPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName);
		mySqlBuilder.addPredicate(hashIdentityPredicate);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnValue(), theAscending);
		mySqlBuilder.addSortNumeric(sortPredicateBuilder.getColumnValue(), theAscending);
	}

	public void addSortOnResourceId(boolean theAscending) {
		BaseJoiningPredicateBuilder firstPredicateBuilder = mySqlBuilder.getOrCreateFirstPredicateBuilder();
		ForcedIdPredicateBuilder sortPredicateBuilder = mySqlBuilder.addForcedIdPredicateBuilder(firstPredicateBuilder.getResourceIdColumn());
		if (!theAscending) {
			mySqlBuilder.addSort(sortPredicateBuilder.getColumnForcedId(), false, OrderObject.NullOrder.FIRST);
			mySqlBuilder.addSortString(sortPredicateBuilder.getColumnForcedId(), false, OrderObject.NullOrder.FIRST);
		} else {
			mySqlBuilder.addSort(sortPredicateBuilder.getColumnForcedId(), true);
			mySqlBuilder.addSortString(sortPredicateBuilder.getColumnForcedId(), true);
		}
		mySqlBuilder.addSort(firstPredicateBuilder.getResourceIdColumn(), theAscending);
		mySqlBuilder.addSortNumeric(firstPredicateBuilder.getResourceIdColumn(), theAscending);

	}

@@ -210,7 +210,7 @@ public class QueryStack {

		Condition pathPredicate = sortPredicateBuilder.createPredicateSourcePaths(theResourceName, theParamName);
		mySqlBuilder.addPredicate(pathPredicate);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnTargetResourceId(), theAscending);
		mySqlBuilder.addSortNumeric(sortPredicateBuilder.getColumnTargetResourceId(), theAscending);
	}

@@ -220,7 +220,7 @@ public class QueryStack {

		Condition hashIdentityPredicate = sortPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName);
		mySqlBuilder.addPredicate(hashIdentityPredicate);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnValueNormalized(), theAscending);
		mySqlBuilder.addSortString(sortPredicateBuilder.getColumnValueNormalized(), theAscending);
	}

	public void addSortOnToken(String theResourceName, String theParamName, boolean theAscending) {

@@ -229,8 +229,8 @@ public class QueryStack {

		Condition hashIdentityPredicate = sortPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName);
		mySqlBuilder.addPredicate(hashIdentityPredicate);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnSystem(), theAscending);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnValue(), theAscending);
		mySqlBuilder.addSortString(sortPredicateBuilder.getColumnSystem(), theAscending);
		mySqlBuilder.addSortString(sortPredicateBuilder.getColumnValue(), theAscending);
	}

	public void addSortOnUri(String theResourceName, String theParamName, boolean theAscending) {

@@ -239,7 +239,7 @@ public class QueryStack {

		Condition hashIdentityPredicate = sortPredicateBuilder.createHashIdentityPredicate(theResourceName, theParamName);
		mySqlBuilder.addPredicate(hashIdentityPredicate);
		mySqlBuilder.addSort(sortPredicateBuilder.getColumnValue(), theAscending);
		mySqlBuilder.addSortString(sortPredicateBuilder.getColumnValue(), theAscending);
	}

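Splitting the single addSort() entry point into addSortDate/addSortNumeric/addSortString makes the column's value type explicit at every call site, so the SQL builder can choose type-appropriate ordering behaviour in one place. The three-argument form with an explicit NullOrder already existed, as the forced-ID call above shows; a heavily hedged sketch of the kind of dispatch this enables (illustrative only, not the actual SearchQueryBuilder internals):

    // Assumption: each typed variant funnels into a common addSort with a
    // type-specific default for null ordering (or collation, casts, etc.).
    void addSortString(DbColumn theColumn, boolean theAscending) {
        addSort(theColumn, theAscending, OrderObject.NullOrder.LAST);
    }
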
@@ -31,7 +31,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean;
import ca.uhn.fhir.jpa.config.HibernateDialectProvider;
import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.IResultIterator;

@@ -173,7 +173,7 @@ public class SearchBuilder implements ISearchBuilder {
	@Autowired
	private SqlObjectFactory mySqlBuilderFactory;
	@Autowired
	private HibernateDialectProvider myDialectProvider;
	private HibernatePropertiesProvider myDialectProvider;

	/**
	 * Constructor

@@ -25,7 +25,6 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.util.CoordCalculator;
-import ca.uhn.fhir.jpa.util.SearchBox;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.dstu2.resource.Location;
import ca.uhn.fhir.rest.param.QuantityParam;

@@ -35,6 +34,7 @@ import com.healthmarketscience.sqlbuilder.BinaryCondition;
import com.healthmarketscience.sqlbuilder.ComboCondition;
import com.healthmarketscience.sqlbuilder.Condition;
import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn;
+import org.hibernate.search.engine.spatial.GeoBoundingBox;

import static org.apache.commons.lang3.StringUtils.isBlank;

@@ -115,7 +115,7 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder {
double latitudeDegrees = Double.parseDouble(latitudeValue);
double longitudeDegrees = Double.parseDouble(longitudeValue);

-SearchBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm);
+GeoBoundingBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm);
latitudePredicate = theFrom.createLatitudePredicateFromBox(box);
longitudePredicate = theFrom.createLongitudePredicateFromBox(box);
}

@@ -132,23 +132,23 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder {
return BinaryCondition.equalTo(myColumnLongitude, generatePlaceholder(theLongitudeValue));
}

-public Condition createLatitudePredicateFromBox(SearchBox theBox) {
+public Condition createLatitudePredicateFromBox(GeoBoundingBox theBox) {
return ComboCondition.and(
-   BinaryCondition.greaterThanOrEq(myColumnLatitude, generatePlaceholder(theBox.getSouthWest().getLatitude())),
-   BinaryCondition.lessThanOrEq(myColumnLatitude, generatePlaceholder(theBox.getNorthEast().getLatitude()))
+   BinaryCondition.greaterThanOrEq(myColumnLatitude, generatePlaceholder(theBox.bottomRight().latitude())),
+   BinaryCondition.lessThanOrEq(myColumnLatitude, generatePlaceholder(theBox.topLeft().latitude()))
);
}

-public Condition createLongitudePredicateFromBox(SearchBox theBox) {
-if (theBox.crossesAntiMeridian()) {
+public Condition createLongitudePredicateFromBox(GeoBoundingBox theBox) {
+if (theBox.bottomRight().longitude() < theBox.topLeft().longitude()) {
return ComboCondition.or(
-   BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getNorthEast().getLongitude())),
-   BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getSouthWest().getLongitude()))
+   BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.bottomRight().longitude())),
+   BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.topLeft().longitude()))
);
}
return ComboCondition.and(
-   BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getSouthWest().getLongitude())),
-   BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getNorthEast().getLongitude()))
+   BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.topLeft().longitude())),
+   BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.bottomRight().longitude()))
);
}
}
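
A note on the new anti-meridian test above: GeoBoundingBox has no crossesAntiMeridian() helper, so the rewrite infers a crossing whenever the box's east edge (bottomRight) sits at a smaller longitude than its west edge (topLeft). A self-contained sketch with made-up coordinates (not taken from this commit) shows the two branches:

// Sketch only: illustrates the longitude comparison used above with hypothetical values.
public class AntiMeridianCheckDemo {
   public static void main(String[] args) {
      // A box around Fiji: west edge at 177 degrees E, east edge wrapped past +180 to -178.
      double topLeftLongitude = 177.0;      // west edge
      double bottomRightLongitude = -178.0; // east edge
      if (bottomRightLongitude < topLeftLongitude) {
         // Crossing detected: match longitude >= 177 OR longitude <= -178,
         // mirroring the ComboCondition.or(...) branch above.
         System.out.println("Box crosses the anti-meridian");
      } else {
         // Ordinary case: a single between-style range suffices,
         // mirroring the ComboCondition.and(...) branch.
         System.out.println("Simple longitude range");
      }
   }
}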

@@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.search.builder.sql;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
-import ca.uhn.fhir.jpa.config.HibernateDialectProvider;
+import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.search.builder.QueryStack;

@@ -95,11 +95,13 @@ public class SearchQueryBuilder {
private ResourceTablePredicateBuilder myResourceTableRoot;
private boolean myHaveAtLeastOnePredicate;
private BaseJoiningPredicateBuilder myFirstPredicateBuilder;
+private boolean dialectIsMsSql;
+private boolean dialectIsMySql;

/**
 * Constructor
 */
-public SearchQueryBuilder(FhirContext theFhirContext, ModelConfig theModelConfig, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernateDialectProvider theDialectProvider, boolean theCountQuery) {
+public SearchQueryBuilder(FhirContext theFhirContext, ModelConfig theModelConfig, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernatePropertiesProvider theDialectProvider, boolean theCountQuery) {
this(theFhirContext, theModelConfig, thePartitionSettings, theRequestPartitionId, theResourceType, theSqlBuilderFactory, UUID.randomUUID().toString() + "-", theDialectProvider.getDialect(), theCountQuery, new ArrayList<>());
}

@@ -115,6 +117,13 @@ public class SearchQueryBuilder {
mySqlBuilderFactory = theSqlBuilderFactory;
myCountQuery = theCountQuery;
myDialect = theDialect;
+if (myDialect instanceof org.hibernate.dialect.MySQLDialect) {
+   dialectIsMySql = true;
+}
+if (myDialect instanceof org.hibernate.dialect.SQLServerDialect) {
+   dialectIsMsSql = true;
+}

mySpec = new DbSpec();
mySchema = mySpec.addDefaultSchema();

@@ -550,12 +559,92 @@ public class SearchQueryBuilder {
return myHaveAtLeastOnePredicate;
}

-public void addSort(DbColumn theColumnValueNormalized, boolean theAscending) {
+public void addSortString(DbColumn theColumnValueNormalized, boolean theAscending) {
OrderObject.NullOrder nullOrder = OrderObject.NullOrder.LAST;
-addSort(theColumnValueNormalized, theAscending, nullOrder);
+addSortString(theColumnValueNormalized, theAscending, nullOrder);
}

-public void addSort(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder) {
+public void addSortNumeric(DbColumn theColumnValueNormalized, boolean theAscending) {
+OrderObject.NullOrder nullOrder = OrderObject.NullOrder.LAST;
+addSortNumeric(theColumnValueNormalized, theAscending, nullOrder);
+}
+
+public void addSortDate(DbColumn theColumnValueNormalized, boolean theAscending) {
+OrderObject.NullOrder nullOrder = OrderObject.NullOrder.LAST;
+addSortDate(theColumnValueNormalized, theAscending, nullOrder);
+}
+
+public void addSortString(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder) {
+if (dialectIsMySql || dialectIsMsSql) {
+   // MariaDB, MySQL and MSSQL do not support "NULLS FIRST" and "NULLS LAST" syntax.
+   String direction = theTheAscending ? " ASC" : " DESC";
+   String sortColumnName = theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName();
+   final StringBuilder sortColumnNameBuilder = new StringBuilder();
+   // The following block has been commented out for performance.
+   // Uncomment if NullOrder is needed for MariaDB, MySQL or MSSQL.
+   /*
+   // Null values are always treated as less than non-null values.
+   if ((theTheAscending && theNullOrder == OrderObject.NullOrder.LAST)
+         || (!theTheAscending && theNullOrder == OrderObject.NullOrder.FIRST)) {
+      // In this case, precede the "order by" column with a case statement that returns
+      // 1 for null and 0 for non-null, so that nulls will be sorted as greater than non-nulls.
+      sortColumnNameBuilder.append("CASE WHEN ").append(sortColumnName).append(" IS NULL THEN 1 ELSE 0 END").append(direction).append(", ");
+   }
+   */
+   sortColumnNameBuilder.append(sortColumnName).append(direction);
+   mySelect.addCustomOrderings(sortColumnNameBuilder.toString());
+} else {
+   addSort(theTheColumnValueNormalized, theTheAscending, theNullOrder);
+}
+}
+
+public void addSortNumeric(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder) {
+if (dialectIsMySql || dialectIsMsSql) {
+   // MariaDB, MySQL and MSSQL do not support "NULLS FIRST" and "NULLS LAST" syntax.
+   // Null values are always treated as less than non-null values.
+   // As such, special handling is required here.
+   String direction;
+   String sortColumnName = theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName();
+   if ((theTheAscending && theNullOrder == OrderObject.NullOrder.LAST)
+         || (!theTheAscending && theNullOrder == OrderObject.NullOrder.FIRST)) {
+      // Negating the numeric column value and reversing the sort order will ensure that the rows appear
+      // in the correct order with nulls appearing first or last as needed.
+      direction = theTheAscending ? " DESC" : " ASC";
+      sortColumnName = "-" + sortColumnName;
+   } else {
+      direction = theTheAscending ? " ASC" : " DESC";
+   }
+   mySelect.addCustomOrderings(sortColumnName + direction);
+} else {
+   addSort(theTheColumnValueNormalized, theTheAscending, theNullOrder);
+}
+}
+
+public void addSortDate(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder) {
+if (dialectIsMySql || dialectIsMsSql) {
+   // MariaDB, MySQL and MSSQL do not support "NULLS FIRST" and "NULLS LAST" syntax.
+   String direction = theTheAscending ? " ASC" : " DESC";
+   String sortColumnName = theTheColumnValueNormalized.getTable().getAlias() + "." + theTheColumnValueNormalized.getName();
+   final StringBuilder sortColumnNameBuilder = new StringBuilder();
+   // The following block has been commented out for performance.
+   // Uncomment if NullOrder is needed for MariaDB, MySQL or MSSQL.
+   /*
+   // Null values are always treated as less than non-null values.
+   if ((theTheAscending && theNullOrder == OrderObject.NullOrder.LAST)
+         || (!theTheAscending && theNullOrder == OrderObject.NullOrder.FIRST)) {
+      // In this case, precede the "order by" column with a case statement that returns
+      // 1 for null and 0 for non-null, so that nulls will be sorted as greater than non-nulls.
+      sortColumnNameBuilder.append("CASE WHEN ").append(sortColumnName).append(" IS NULL THEN 1 ELSE 0 END").append(direction).append(", ");
+   }
+   */
+   sortColumnNameBuilder.append(sortColumnName).append(direction);
+   mySelect.addCustomOrderings(sortColumnNameBuilder.toString());
+} else {
+   addSort(theTheColumnValueNormalized, theTheAscending, theNullOrder);
+}
+}
+
+private void addSort(DbColumn theTheColumnValueNormalized, boolean theTheAscending, OrderObject.NullOrder theNullOrder) {
OrderObject.Dir direction = theTheAscending ? OrderObject.Dir.ASCENDING : OrderObject.Dir.DESCENDING;
OrderObject orderObject = new OrderObject(direction, theTheColumnValueNormalized);
orderObject.setNullOrder(theNullOrder);
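
To make the addSortNumeric() negation trick above concrete, here is a minimal standalone sketch of the ORDER BY fragment it emits on MySQL/MSSQL. The table alias "t0" and column "SP_VALUE" are hypothetical, not names from this commit:

// Illustration only: reproduces the null-ordering decision from addSortNumeric() above.
public class NullOrderDemo {
   public static void main(String[] args) {
      boolean ascending = true;
      boolean nullsLast = true; // stands in for OrderObject.NullOrder.LAST
      String sortColumnName = "t0.SP_VALUE";
      String direction;
      // Same condition as ((asc && LAST) || (!asc && FIRST)) with only two null orders.
      if (ascending == nullsLast) {
         // Negate the column and flip the direction: NULLs (treated as smallest) move to the end.
         sortColumnName = "-" + sortColumnName;
         direction = ascending ? " DESC" : " ASC";
      } else {
         direction = ascending ? " ASC" : " DESC";
      }
      // Prints "ORDER BY -t0.SP_VALUE DESC"
      System.out.println("ORDER BY " + sortColumnName + direction);
   }
}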

@@ -20,29 +20,48 @@ package ca.uhn.fhir.jpa.search.elastic;
 * #L%
 */

+import ca.uhn.fhir.context.ConfigurationException;
+import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory;
import org.apache.commons.lang3.StringUtils;
-import org.hibernate.search.cfg.Environment;
-import org.hibernate.search.elasticsearch.cfg.ElasticsearchEnvironment;
-import org.hibernate.search.elasticsearch.cfg.ElasticsearchIndexStatus;
-import org.hibernate.search.elasticsearch.cfg.IndexSchemaManagementStrategy;
+import org.elasticsearch.action.support.master.AcknowledgedResponse;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.indices.PutIndexTemplateRequest;
+import org.elasticsearch.common.settings.Settings;
+import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
+import org.hibernate.search.engine.cfg.BackendSettings;
+import org.hibernate.search.mapper.orm.automaticindexing.session.AutomaticIndexingSynchronizationStrategyNames;
+import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
+import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchBackendSettings;
+import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings;
+import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
+import org.slf4j.Logger;

+import java.io.IOException;
+import java.util.Arrays;
import java.util.Properties;

+import static org.slf4j.LoggerFactory.getLogger;

/**
 * This class is used to inject appropriate properties into a hibernate
 * Properties object being used to create an entity manager for a HAPI
- * FHIR JPA server.
+ * FHIR JPA server. This class also injects a starter template into the ES cluster.
 */
public class ElasticsearchHibernatePropertiesBuilder {
+private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class);

+private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW;
+private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE;
-private ElasticsearchIndexStatus myRequiredIndexStatus = ElasticsearchIndexStatus.YELLOW;
private String myRestUrl;
private String myUsername;
private String myPassword;
-private IndexSchemaManagementStrategy myIndexSchemaManagementStrategy = IndexSchemaManagementStrategy.CREATE;
private long myIndexManagementWaitTimeoutMillis = 10000L;
-private boolean myDebugRefreshAfterWrite = false;
+private String myDebugSyncStrategy = AutomaticIndexingSynchronizationStrategyNames.ASYNC;
private boolean myDebugPrettyPrintJsonLog = false;
+private String myProtocol;

public ElasticsearchHibernatePropertiesBuilder setUsername(String theUsername) {
myUsername = theUsername;

@@ -56,36 +75,35 @@ public class ElasticsearchHibernatePropertiesBuilder {

public void apply(Properties theProperties) {

// Don't use the Lucene properties as they conflict
theProperties.remove("hibernate.search.model_mapping");

-// the below properties are used for ElasticSearch integration
-theProperties.put("hibernate.search.default." + Environment.INDEX_MANAGER_IMPL_NAME, "elasticsearch");
-theProperties.put("hibernate.search." + ElasticsearchEnvironment.ANALYSIS_DEFINITION_PROVIDER, ElasticsearchMappingProvider.class.getName());
+theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
+theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName());
+theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myRestUrl);
+theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol);

-theProperties.put("hibernate.search.default.elasticsearch.host", myRestUrl);
if (StringUtils.isNotBlank(myUsername)) {
-   theProperties.put("hibernate.search.default.elasticsearch.username", myUsername);
+   theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.USERNAME), myUsername);
}
if (StringUtils.isNotBlank(myPassword)) {
-   theProperties.put("hibernate.search.default.elasticsearch.password", myPassword);
+   theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PASSWORD), myPassword);
}

-theProperties.put("hibernate.search.default." + ElasticsearchEnvironment.INDEX_SCHEMA_MANAGEMENT_STRATEGY, myIndexSchemaManagementStrategy.getExternalName());
-theProperties.put("hibernate.search.default." + ElasticsearchEnvironment.INDEX_MANAGEMENT_WAIT_TIMEOUT, Long.toString(myIndexManagementWaitTimeoutMillis));
-theProperties.put("hibernate.search.default." + ElasticsearchEnvironment.REQUIRED_INDEX_STATUS, myRequiredIndexStatus.getElasticsearchString());
+theProperties.put(HibernateOrmMapperSettings.SCHEMA_MANAGEMENT_STRATEGY, myIndexSchemaManagementStrategy.externalRepresentation());
+theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS_WAIT_TIMEOUT), Long.toString(myIndexManagementWaitTimeoutMillis));
+theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS), myRequiredIndexStatus.externalRepresentation());
// Need the mapping to be dynamic because of terminology indexes.
-theProperties.put("hibernate.search.default.elasticsearch.dynamic_mapping", "true");
+theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.DYNAMIC_MAPPING), "true");
// Only for unit tests
-theProperties.put("hibernate.search.default." + ElasticsearchEnvironment.REFRESH_AFTER_WRITE, Boolean.toString(myDebugRefreshAfterWrite));
-theProperties.put("hibernate.search." + ElasticsearchEnvironment.LOG_JSON_PRETTY_PRINTING, Boolean.toString(myDebugPrettyPrintJsonLog));
+theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy);
+theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog));

+injectStartupTemplate(myProtocol, myRestUrl, myUsername, myPassword);
}

-public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(ElasticsearchIndexStatus theRequiredIndexStatus) {
+public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) {
myRequiredIndexStatus = theRequiredIndexStatus;
return this;
}

@@ -95,7 +113,12 @@ public class ElasticsearchHibernatePropertiesBuilder {
return this;
}

-public ElasticsearchHibernatePropertiesBuilder setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy theIndexSchemaManagementStrategy) {
+public ElasticsearchHibernatePropertiesBuilder setProtocol(String theProtocol) {
+myProtocol = theProtocol;
+return this;
+}
+
+public ElasticsearchHibernatePropertiesBuilder setIndexSchemaManagementStrategy(SchemaManagementStrategyName theIndexSchemaManagementStrategy) {
myIndexSchemaManagementStrategy = theIndexSchemaManagementStrategy;
return this;
}

@@ -105,15 +128,40 @@ public class ElasticsearchHibernatePropertiesBuilder {
return this;
}

-public ElasticsearchHibernatePropertiesBuilder setDebugRefreshAfterWrite(boolean theDebugRefreshAfterWrite) {
-myDebugRefreshAfterWrite = theDebugRefreshAfterWrite;
+public ElasticsearchHibernatePropertiesBuilder setDebugIndexSyncStrategy(String theSyncStrategy) {
+myDebugSyncStrategy = theSyncStrategy;
return this;
}

public ElasticsearchHibernatePropertiesBuilder setDebugPrettyPrintJsonLog(boolean theDebugPrettyPrintJsonLog) {
myDebugPrettyPrintJsonLog = theDebugPrettyPrintJsonLog;
return this;
}

+/**
+ * At startup time, injects a template into the elasticsearch cluster, which is needed for handling large ngram diffs.
+ * TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should
+ * eventually be removed in favour of whatever solution they come up with.
+ */
+private void injectStartupTemplate(String theProtocol, String theHostAndPort, String theUsername, String thePassword) {
+PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template")
+   .patterns(Arrays.asList("resourcetable-*", "termconcept-*"))
+   .settings(Settings.builder().put("index.max_ngram_diff", 50));
+
+int colonIndex = theHostAndPort.indexOf(":");
+String host = theHostAndPort.substring(0, colonIndex);
+Integer port = Integer.valueOf(theHostAndPort.substring(colonIndex + 1));
+String qualifiedHost = theProtocol + "://" + host;
+
+try {
+   RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(qualifiedHost, port, theUsername, thePassword);
+   ourLog.info("Adding starter template for large ngram diffs");
+   AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT);
+   assert acknowledgedResponse.isAcknowledged();
+} catch (IOException theE) {
+   theE.printStackTrace();
+   throw new ConfigurationException("Couldn't connect to the elasticsearch server to create necessary templates. Ensure the Elasticsearch user has permissions to create templates.");
+}
+}
}
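
For orientation, a hedged usage sketch of the builder (not taken from this commit): setRestUrl() and setPassword() are assumed from the myRestUrl and myPassword fields, since their setters fall outside the hunks shown, and all connection values are placeholders.

import java.util.Properties;

import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;

public class ElasticPropertiesSketch {
   public static Properties jpaProperties() {
      Properties props = new Properties();
      new ElasticsearchHibernatePropertiesBuilder()
         .setProtocol("http")                        // placeholder
         .setRestUrl("localhost:9200")               // assumed setter for myRestUrl
         .setUsername("elastic")                     // placeholder credentials
         .setPassword("changeme")                    // assumed setter for myPassword
         .setRequiredIndexStatus(IndexStatus.YELLOW)
         .setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
         .apply(props); // fills in the hibernate.search.* keys and pushes the ngram
                        // template, so it needs a reachable Elasticsearch cluster
      return props;
   }
}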

@@ -1,69 +0,0 @@
-package ca.uhn.fhir.jpa.search.elastic;
-
-/*-
- * #%L
- * HAPI FHIR JPA Server
- * %%
- * Copyright (C) 2014 - 2021 Smile CDR, Inc.
- * %%
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * #L%
- */
-
-import org.hibernate.search.elasticsearch.analyzer.definition.ElasticsearchAnalysisDefinitionProvider;
-import org.hibernate.search.elasticsearch.analyzer.definition.ElasticsearchAnalysisDefinitionRegistryBuilder;
-
-public class ElasticsearchMappingProvider implements ElasticsearchAnalysisDefinitionProvider {
-
-   @Override
-   public void register(ElasticsearchAnalysisDefinitionRegistryBuilder builder) {
-      builder.analyzer("autocompleteEdgeAnalyzer")
-         .withTokenizer("pattern_all")
-         .withTokenFilters("lowercase", "stop", "edgengram_3_50");
-      builder.tokenizer("pattern_all").type("pattern").param("pattern", "(.*)").param("group", "1");
-      builder.tokenFilter("edgengram_3_50")
-         .type("edgeNGram")
-         .param("min_gram", "3")
-         .param("max_gram", "50");
-
-      builder.analyzer("autocompleteWordEdgeAnalyzer")
-         .withTokenizer("standard")
-         .withTokenFilters("lowercase", "stop", "wordedgengram_3_50");
-      builder.tokenFilter("wordedgengram_3_50")
-         .type("edgeNGram")
-         .param("min_gram", "3")
-         .param("max_gram", "20");
-
-      builder.analyzer("autocompletePhoneticAnalyzer")
-         .withTokenizer("standard")
-         .withTokenFilters("standard", "stop", "snowball_english");
-      builder.tokenFilter("snowball_english").type("snowball").param("language", "English");
-
-      builder.analyzer("autocompleteNGramAnalyzer")
-         .withTokenizer("standard")
-         .withTokenFilters("word_delimiter", "lowercase", "ngram_3_20");
-      builder.tokenFilter("ngram_3_20")
-         .type("nGram")
-         .param("min_gram", "3")
-         .param("max_gram", "20");
-
-      builder.analyzer("standardAnalyzer").withTokenizer("standard").withTokenFilters("lowercase");
-
-      builder.analyzer("exactAnalyzer").withTokenizer("keyword");
-
-      builder.analyzer("conceptParentPidsAnalyzer").withTokenizer("whitespace");
-
-      builder.analyzer("termConceptPropertyAnalyzer").withTokenizer("whitespace");
-   }
-}

@@ -0,0 +1,87 @@
+package ca.uhn.fhir.jpa.search.elastic;
+
+/*-
+ * #%L
+ * HAPI FHIR JPA Server
+ * %%
+ * Copyright (C) 2014 - 2021 Smile CDR, Inc.
+ * %%
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * #L%
+ */
+
+import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurationContext;
+import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurer;
+
+public class HapiElasticsearchAnalysisConfigurer implements ElasticsearchAnalysisConfigurer {
+
+   @Override
+   public void configure(ElasticsearchAnalysisConfigurationContext theConfigCtx) {
+
+      theConfigCtx.analyzer("autocompleteEdgeAnalyzer").custom()
+         .tokenizer("pattern_all")
+         .tokenFilters("lowercase", "stop", "edgengram_3_50");
+
+      theConfigCtx.tokenizer("pattern_all")
+         .type("pattern")
+         .param("pattern", "(.*)")
+         .param("group", "1");
+
+      theConfigCtx.tokenFilter("edgengram_3_50")
+         .type("edgeNGram")
+         .param("min_gram", "3")
+         .param("max_gram", "50");
+
+      theConfigCtx.analyzer("autocompleteWordEdgeAnalyzer").custom()
+         .tokenizer("standard")
+         .tokenFilters("lowercase", "stop", "wordedgengram_3_50");
+
+      theConfigCtx.tokenFilter("wordedgengram_3_50")
+         .type("edgeNGram")
+         .param("min_gram", "2")
+         .param("max_gram", "20");
+
+      theConfigCtx.analyzer("autocompletePhoneticAnalyzer").custom()
+         .tokenizer("standard")
+         .tokenFilters("stop", "snowball_english");
+
+      theConfigCtx.tokenFilter("snowball_english")
+         .type("snowball")
+         .param("language", "English");
+
+      theConfigCtx.analyzer("autocompleteNGramAnalyzer").custom()
+         .tokenizer("standard")
+         .tokenFilters("word_delimiter", "lowercase", "ngram_3_20");
+
+      theConfigCtx.tokenFilter("ngram_3_20")
+         .type("nGram")
+         .param("min_gram", "3")
+         .param("max_gram", "20");
+
+      theConfigCtx.analyzer("standardAnalyzer").custom()
+         .tokenizer("standard")
+         .tokenFilters("lowercase");
+
+      theConfigCtx.analyzer("exactAnalyzer")
+         .custom()
+         .tokenizer("keyword");
+
+      theConfigCtx.analyzer("conceptParentPidsAnalyzer").custom()
+         .tokenizer("whitespace");
+
+      theConfigCtx.analyzer("termConceptPropertyAnalyzer").custom()
+         .tokenizer("whitespace");
+   }
+}

@@ -27,20 +27,38 @@ import org.apache.http.auth.UsernamePasswordCredentials;
import org.apache.http.client.CredentialsProvider;
import org.apache.http.impl.client.BasicCredentialsProvider;
import org.apache.http.message.BasicHeader;
-import org.shadehapi.elasticsearch.client.RestClient;
-import org.shadehapi.elasticsearch.client.RestClientBuilder;
-import org.shadehapi.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.RestClient;
+import org.elasticsearch.client.RestClientBuilder;
+import org.elasticsearch.client.RestHighLevelClient;

public class ElasticsearchRestClientFactory {

+private static String determineScheme(String theHostname) {
+   int schemeIdx = theHostname.indexOf("://");
+   if (schemeIdx > 0) {
+      return theHostname.substring(0, schemeIdx);
+   } else {
+      return "http";
+   }
+}
+
+private static String stripHostOfScheme(String theHostname) {
+   int schemeIdx = theHostname.indexOf("://");
+   if (schemeIdx > 0) {
+      return theHostname.substring(schemeIdx + 3);
+   } else {
+      return theHostname;
+   }
+}
+
static public RestHighLevelClient createElasticsearchHighLevelRestClient(String theHostname, int thePort, String theUsername, String thePassword) {
final CredentialsProvider credentialsProvider =
   new BasicCredentialsProvider();
credentialsProvider.setCredentials(AuthScope.ANY,
   new UsernamePasswordCredentials(theUsername, thePassword));

RestClientBuilder clientBuilder = RestClient.builder(
-   new HttpHost(theHostname, thePort))
+   new HttpHost(stripHostOfScheme(theHostname), thePort, determineScheme(theHostname)))
   .setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder
      .setDefaultCredentialsProvider(credentialsProvider));
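
The two helpers added above let callers pass the host with or without a scheme prefix. A standalone copy of that logic, exercised with sample inputs (hostnames are hypothetical):

// Standalone copy of the scheme-splitting logic above, for illustration.
public class SchemeSplitDemo {
   static String determineScheme(String hostname) {
      int idx = hostname.indexOf("://");
      return idx > 0 ? hostname.substring(0, idx) : "http"; // default to http
   }

   static String stripHostOfScheme(String hostname) {
      int idx = hostname.indexOf("://");
      return idx > 0 ? hostname.substring(idx + 3) : hostname;
   }

   public static void main(String[] args) {
      // "https://es.example.org" splits into scheme "https" and host "es.example.org".
      System.out.println(determineScheme("https://es.example.org"));
      System.out.println(stripHostOfScheme("https://es.example.org"));
      // A bare host falls back to "http" and is passed through unchanged.
      System.out.println(determineScheme("localhost"));
      System.out.println(stripHostOfScheme("localhost"));
   }
}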

@@ -37,40 +37,40 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
-import org.shadehapi.elasticsearch.action.DocWriteResponse;
-import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexRequest;
-import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexResponse;
-import org.shadehapi.elasticsearch.action.admin.indices.get.GetIndexRequest;
-import org.shadehapi.elasticsearch.action.admin.indices.refresh.RefreshRequest;
-import org.shadehapi.elasticsearch.action.index.IndexRequest;
-import org.shadehapi.elasticsearch.action.index.IndexResponse;
-import org.shadehapi.elasticsearch.action.search.SearchRequest;
-import org.shadehapi.elasticsearch.action.search.SearchResponse;
-import org.shadehapi.elasticsearch.client.RequestOptions;
-import org.shadehapi.elasticsearch.client.RestHighLevelClient;
-import org.shadehapi.elasticsearch.common.xcontent.XContentType;
-import org.shadehapi.elasticsearch.index.query.BoolQueryBuilder;
-import org.shadehapi.elasticsearch.index.query.MatchQueryBuilder;
-import org.shadehapi.elasticsearch.index.query.QueryBuilders;
-import org.shadehapi.elasticsearch.index.query.RangeQueryBuilder;
-import org.shadehapi.elasticsearch.index.reindex.DeleteByQueryRequest;
-import org.shadehapi.elasticsearch.search.SearchHit;
-import org.shadehapi.elasticsearch.search.SearchHits;
-import org.shadehapi.elasticsearch.search.aggregations.AggregationBuilder;
-import org.shadehapi.elasticsearch.search.aggregations.AggregationBuilders;
-import org.shadehapi.elasticsearch.search.aggregations.Aggregations;
-import org.shadehapi.elasticsearch.search.aggregations.BucketOrder;
-import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
-import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
-import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.ParsedComposite;
-import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
-import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.ParsedTerms;
-import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.Terms;
-import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
-import org.shadehapi.elasticsearch.search.aggregations.metrics.tophits.ParsedTopHits;
-import org.shadehapi.elasticsearch.search.aggregations.support.ValueType;
-import org.shadehapi.elasticsearch.search.builder.SearchSourceBuilder;
-import org.shadehapi.elasticsearch.search.sort.SortOrder;
+import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.client.RequestOptions;
+import org.elasticsearch.client.RestHighLevelClient;
+import org.elasticsearch.client.indices.CreateIndexRequest;
+import org.elasticsearch.client.indices.CreateIndexResponse;
+import org.elasticsearch.client.indices.GetIndexRequest;
+import org.elasticsearch.common.xcontent.XContentType;
+import org.elasticsearch.index.query.BoolQueryBuilder;
+import org.elasticsearch.index.query.MatchQueryBuilder;
+import org.elasticsearch.index.query.QueryBuilders;
+import org.elasticsearch.index.query.RangeQueryBuilder;
+import org.elasticsearch.index.reindex.DeleteByQueryRequest;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.aggregations.AggregationBuilder;
+import org.elasticsearch.search.aggregations.AggregationBuilders;
+import org.elasticsearch.search.aggregations.Aggregations;
+import org.elasticsearch.search.aggregations.BucketOrder;
+import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
+import org.elasticsearch.search.aggregations.bucket.composite.ParsedComposite;
+import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;
+import org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms;
+import org.elasticsearch.search.aggregations.bucket.terms.Terms;
+import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
+import org.elasticsearch.search.aggregations.metrics.ParsedTopHits;
+import org.elasticsearch.search.aggregations.support.ValueType;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.sort.SortOrder;
+import org.springframework.beans.factory.annotation.Autowired;

import java.io.BufferedReader;

@@ -120,9 +120,16 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
private final RestHighLevelClient myRestHighLevelClient;

private final ObjectMapper objectMapper = new ObjectMapper();

+@Autowired
+private PartitionSettings myPartitionSettings;
+
+// This constructor is used to inject a dummy PartitionSettings in tests.
+public ElasticsearchSvcImpl(PartitionSettings thePartitionSettings, String theHostname, int thePort, String theUsername, String thePassword) {
+   this(theHostname, thePort, theUsername, thePassword);
+   this.myPartitionSettings = thePartitionSettings;
+}
+
public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) {
myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword);

@@ -176,8 +183,7 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}

private boolean indexExists(String theIndexName) throws IOException {
-GetIndexRequest request = new GetIndexRequest();
-request.indices(theIndexName);
+GetIndexRequest request = new GetIndexRequest(theIndexName);
return myRestHighLevelClient.indices().exists(request, RequestOptions.DEFAULT);
}

@@ -278,14 +284,14 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}

private TermsAggregationBuilder createObservationCodeAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
-TermsAggregationBuilder observationCodeCodeAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_CODE, ValueType.STRING).field(OBSERVATION_CODEVALUE_FIELD_NAME);
+TermsAggregationBuilder observationCodeCodeAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_CODE).field(OBSERVATION_CODEVALUE_FIELD_NAME);
observationCodeCodeAggregationBuilder.order(BucketOrder.key(true));
// Top Hits Aggregation
observationCodeCodeAggregationBuilder.subAggregation(AggregationBuilders.topHits(MOST_RECENT_EFFECTIVE)
   .sort(OBSERVATION_EFFECTIVEDTM_FIELD_NAME, SortOrder.DESC)
   .fetchSource(theTopHitsInclude, null).size(theMaxNumberObservationsPerCode));
observationCodeCodeAggregationBuilder.size(10000);
-TermsAggregationBuilder observationCodeSystemAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_SYSTEM, ValueType.STRING).field(OBSERVATION_CODESYSTEM_FIELD_NAME);
+TermsAggregationBuilder observationCodeSystemAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_SYSTEM).field(OBSERVATION_CODESYSTEM_FIELD_NAME);
observationCodeSystemAggregationBuilder.order(BucketOrder.key(true));
observationCodeSystemAggregationBuilder.subAggregation(observationCodeCodeAggregationBuilder);
return observationCodeSystemAggregationBuilder;

@@ -728,8 +734,6 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
private IndexRequest createIndexRequest(String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) {
IndexRequest request = new IndexRequest(theIndexName);
request.id(theDocumentId);
-request.type(theDocumentType);

request.source(theObservationDocument, XContentType.JSON);
return request;
}

@@ -46,7 +46,6 @@ import com.google.common.annotations.VisibleForTesting;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.concurrent.BasicThreadFactory;
import org.apache.commons.lang3.time.DateUtils;
-import org.hibernate.search.util.impl.Executors;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.InstantType;
import org.quartz.JobExecutionContext;

@@ -165,7 +164,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
public void initExecutor() {
// Create the threadpool executor used for reindex jobs
int reindexThreadCount = myDaoConfig.getReindexThreadCount();
-RejectedExecutionHandler rejectHandler = new Executors.BlockPolicy();
+RejectedExecutionHandler rejectHandler = new BlockPolicy();
myTaskExecutor = new ThreadPoolExecutor(0, reindexThreadCount,
   0L, TimeUnit.MILLISECONDS,
   new LinkedBlockingQueue<>(100),

@@ -173,6 +172,30 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
   rejectHandler
);
}

+/**
+ * A handler for rejected tasks that will have the caller block until space is available.
+ * This was stolen from old Hibernate Search (5.x.x), as it has been removed in HS6. We can probably come up with a better solution, though.
+ */
+public static class BlockPolicy implements RejectedExecutionHandler {
+
+   /**
+    * Puts the Runnable to the blocking queue, effectively blocking the delegating thread until space is available.
+    *
+    * @param r the runnable task requested to be executed
+    * @param e the executor attempting to execute this task
+    */
+   @Override
+   public void rejectedExecution(Runnable r, ThreadPoolExecutor e) {
+      try {
+         e.getQueue().put(r);
+      } catch (InterruptedException e1) {
+         ourLog.error("Interrupted Exception for task: {}", r, e1);
+         Thread.currentThread().interrupt();
+      }
+   }
+}

public void scheduleJob() {
ScheduledJobDefinition jobDetail = new ScheduledJobDefinition();
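
The effect of the BlockPolicy added above is back-pressure: once the bounded queue fills, execute() blocks the submitting thread instead of throwing RejectedExecutionException. A runnable sketch (pool and queue sizes are arbitrary, not the values used by the reindexing service):

import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class BlockPolicyDemo {
   public static void main(String[] args) throws InterruptedException {
      // Same shape as initExecutor() above: bounded queue plus blocking reject handler.
      RejectedExecutionHandler blockPolicy = (r, e) -> {
         try {
            e.getQueue().put(r); // caller blocks here until a queue slot frees up
         } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
         }
      };
      ThreadPoolExecutor executor = new ThreadPoolExecutor(1, 1,
         0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(2), blockPolicy);
      for (int i = 0; i < 10; i++) {
         final int task = i;
         // Without BlockPolicy, the fourth submission (1 running + 2 queued) would
         // be rejected; with it, this call simply waits.
         executor.execute(() -> System.out.println("ran task " + task));
      }
      executor.shutdown();
      executor.awaitTermination(10, TimeUnit.SECONDS);
   }
}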

@@ -545,6 +568,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc {
}

doReindex(resourceTable, resource);

return null;

} catch (Exception e) {

@@ -31,6 +31,7 @@ import ca.uhn.fhir.jpa.api.dao.IDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem;
import ca.uhn.fhir.jpa.api.model.TranslationQuery;
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
+import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;

@@ -56,7 +57,7 @@ import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.entity.TermConceptProperty;
-import ca.uhn.fhir.jpa.entity.TermConceptPropertyFieldBridge;
+import ca.uhn.fhir.jpa.entity.TermConceptPropertyBinder;
import ca.uhn.fhir.jpa.entity.TermConceptPropertyTypeEnum;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;

@@ -98,18 +99,17 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.lucene.index.Term;
-import org.apache.lucene.queries.TermsQuery;
-import org.apache.lucene.search.BooleanClause;
-import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.RegexpQuery;
-import org.apache.lucene.search.TermQuery;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
-import org.hibernate.search.jpa.FullTextEntityManager;
-import org.hibernate.search.jpa.FullTextQuery;
-import org.hibernate.search.query.dsl.BooleanJunction;
-import org.hibernate.search.query.dsl.QueryBuilder;
+import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension;
+import org.hibernate.search.backend.lucene.LuceneExtension;
+import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep;
+import org.hibernate.search.engine.search.predicate.dsl.PredicateFinalStep;
+import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory;
+import org.hibernate.search.engine.search.query.SearchQuery;
+import org.hibernate.search.mapper.orm.Search;
+import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService;
import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport;
import org.hl7.fhir.exceptions.FHIRException;

@@ -252,6 +252,14 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
private volatile IValidationSupport myJpaValidationSupport;
private volatile IValidationSupport myValidationSupport;

+// We need this bean so we can tell which mode Hibernate Search is running in.
+@Autowired
+private HibernatePropertiesProvider myHibernatePropertiesProvider;
+
+private boolean isFullTextSetToUseElastic() {
+   return "elasticsearch".equalsIgnoreCase(myHibernatePropertiesProvider.getHibernateSearchBackend());
+}

@Override
public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) {
TermCodeSystemVersion cs = getCurrentCodeSystemVersion(theSystem);
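
The isFullTextSetToUseElastic() check added in the hunk above keys off a single Hibernate Search 6 setting. A small sketch of the property that flips it, using the same BackendSettings key this commit writes in ElasticsearchHibernatePropertiesBuilder; how HibernatePropertiesProvider reads the value back is outside this diff:

import java.util.Properties;

import org.hibernate.search.engine.cfg.BackendSettings;

public class BackendModeSketch {
   public static void main(String[] args) {
      Properties props = new Properties();
      // Same key that ElasticsearchHibernatePropertiesBuilder.apply() sets above;
      // "lucene" would select the embedded-index alternative.
      props.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch");
      // isFullTextSetToUseElastic() effectively compares against this value.
      System.out.println(props.get("hibernate.search.backend.type"));
   }
}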

@@ -538,6 +546,8 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(theTermValueSet.getId(), displayValue);
wasFilteredResult = true;
} else {
+// TODO JA HS: I'm pretty sure we are overfetching here. test says offset 3, count 4, but we are fetching index 3 -> 10 here, grabbing 7 concepts.
+// Specifically this test: testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange
conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId());
theAccumulator.consumeSkipCount(offset);
if (theAdd) {

@@ -899,8 +909,8 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
} else {
csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCs.getPid(), includeOrExcludeVersion);
}
-FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
+SearchSession searchSession = Search.session(myEntityManager);
/*
 * If FullText searching is not enabled, we can handle only basic expansions
 * since we're going to do it without the database.

@@ -913,68 +923,41 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
/*
 * Ok, let's use hibernate search to build the expansion
 */
-QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
-BooleanJunction<?> bool = qb.bool();
+// Manually building a predicate since we need to throw it around.
+SearchPredicateFactory predicate = searchSession.scope(TermConcept.class).predicate();

-bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery());
+// Build the top-level expansion on filters.
+PredicateFinalStep step = predicate.bool(b -> {
+   b.must(predicate.match().field("myCodeSystemVersionPid").matching(csv.getPid()));

-if (theExpansionFilter.hasCode()) {
-   bool.must(qb.keyword().onField("myCode").matching(theExpansionFilter.getCode()).createQuery());
-}
-
-/*
- * Filters
- */
-String codeSystemUrlAndVersion;
-if (includeOrExcludeVersion != null) {
-   codeSystemUrlAndVersion = theSystem + "|" + includeOrExcludeVersion;
-} else {
-   codeSystemUrlAndVersion = theSystem;
-}
-for (ValueSet.ConceptSetFilterComponent nextFilter : theIncludeOrExclude.getFilter()) {
-   handleFilter(codeSystemUrlAndVersion, qb, bool, nextFilter);
-}
-for (ValueSet.ConceptSetFilterComponent nextFilter : theExpansionFilter.getFilters()) {
-   handleFilter(codeSystemUrlAndVersion, qb, bool, nextFilter);
-}
-
-Query luceneQuery = bool.createQuery();
-
-/*
- * Include/Exclude Concepts
- */
-List<Term> codes = theIncludeOrExclude
-   .getConcept()
-   .stream()
-   .filter(Objects::nonNull)
-   .map(ValueSet.ConceptReferenceComponent::getCode)
-   .filter(StringUtils::isNotBlank)
-   .map(t -> new Term("myCode", t))
-   .collect(Collectors.toList());
-if (codes.size() > 0) {
-
-   BooleanQuery.Builder builder = new BooleanQuery.Builder();
-   builder.setMinimumNumberShouldMatch(1);
-   for (Term nextCode : codes) {
-      builder.add(new TermQuery(nextCode), BooleanClause.Occur.SHOULD);
-   }
-
-   luceneQuery = new BooleanQuery.Builder()
-      .add(luceneQuery, BooleanClause.Occur.MUST)
-      .add(builder.build(), BooleanClause.Occur.MUST)
-      .build();
-}
+   if (theExpansionFilter.hasCode()) {
+      b.must(predicate.match().field("myCode").matching(theExpansionFilter.getCode()));
+   }

+   String codeSystemUrlAndVersion = buildCodeSystemUrlAndVersion(theSystem, includeOrExcludeVersion);
+   for (ValueSet.ConceptSetFilterComponent nextFilter : theIncludeOrExclude.getFilter()) {
+      handleFilter(codeSystemUrlAndVersion, predicate, b, nextFilter);
+   }
+   for (ValueSet.ConceptSetFilterComponent nextFilter : theExpansionFilter.getFilters()) {
+      handleFilter(codeSystemUrlAndVersion, predicate, b, nextFilter);
+   }
+});

/*
 * Execute the query
 */
-FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class);
+PredicateFinalStep expansionStep = buildExpansionPredicate(theIncludeOrExclude, predicate);
+final PredicateFinalStep finishedQuery;
+if (expansionStep == null) {
+   finishedQuery = step;
+} else {
+   finishedQuery = predicate.bool().must(step).must(expansionStep);
+}

/*
 * DM 2019-08-21 - Processing slows after any ValueSets with many codes explicitly identified. This might
 * be due to the dark arts that is memory management. Will monitor but not do anything about this right now.
 */
-BooleanQuery.setMaxClauseCount(SearchBuilder.getMaximumPageSize());
+//BooleanQuery.setMaxClauseCount(SearchBuilder.getMaximumPageSize());
+//TODO GGG HS looks like we can't set max clause count, but it can be set server side.
+//BooleanQuery.setMaxClauseCount(10000);

StopWatch sw = new StopWatch();
AtomicInteger count = new AtomicInteger(0);

@@ -995,22 +978,27 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
}
}

-jpaQuery.setMaxResults(maxResultsPerBatch);
-jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch);
+// jpaQuery.setMaxResults(maxResultsPerBatch);
+// jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch);

ourLog.debug("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), maxResultsPerBatch);

StopWatch swForBatch = new StopWatch();
AtomicInteger countForBatch = new AtomicInteger(0);

-List<?> resultList = jpaQuery.getResultList();
-int resultsInBatch = resultList.size();
-int firstResult = jpaQuery.getFirstResult();
+SearchQuery<TermConcept> termConceptsQuery = searchSession.search(TermConcept.class)
+   .where(f -> finishedQuery).toQuery();
+
+System.out.println("About to query:" + termConceptsQuery.queryString());
+List<TermConcept> termConcepts = termConceptsQuery.fetchHits(theQueryIndex * maxResultsPerBatch, maxResultsPerBatch);
+
+int resultsInBatch = termConcepts.size();
+int firstResult = theQueryIndex * maxResultsPerBatch; // TODO GGG HS we lose the ability to check the index of the first result, so just best-guessing it here.
int delta = 0;
-for (Object next : resultList) {
+for (TermConcept concept : termConcepts) {
count.incrementAndGet();
countForBatch.incrementAndGet();
-TermConcept concept = (TermConcept) next;
boolean added = addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, includeOrExcludeVersion);
if (added) {
   delta++;

@@ -1028,6 +1016,46 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
}
}

+/**
+ * Helper method which builds a predicate for the expansion
+ */
+private PredicateFinalStep buildExpansionPredicate(ValueSet.ConceptSetComponent theTheIncludeOrExclude, SearchPredicateFactory thePredicate) {
+PredicateFinalStep expansionStep;
+/*
+ * Include/Exclude Concepts
+ */
+List<Term> codes = theTheIncludeOrExclude
+   .getConcept()
+   .stream()
+   .filter(Objects::nonNull)
+   .map(ValueSet.ConceptReferenceComponent::getCode)
+   .filter(StringUtils::isNotBlank)
+   .map(t -> new Term("myCode", t))
+   .collect(Collectors.toList());
+
+if (codes.size() > 0) {
+   expansionStep = thePredicate.bool(b -> {
+      b.minimumShouldMatchNumber(1);
+      for (Term code : codes) {
+         b.should(thePredicate.match().field(code.field()).matching(code.text()));
+      }
+   });
+   return expansionStep;
+} else {
+   return null;
+}
+}
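
The predicate built above uses should clauses with minimumShouldMatchNumber(1), which degenerates to a plain OR over the enumerated codes. A tiny sketch of that semantics with hypothetical codes (not from this commit):

import java.util.Arrays;
import java.util.List;

// Illustration only: mimics the should/minimumShouldMatchNumber(1) semantics of the
// predicate above, i.e. a concept matches if its code equals ANY enumerated code.
public class ShouldMatchDemo {
   public static void main(String[] args) {
      List<String> enumeratedCodes = Arrays.asList("8867-4", "8480-6"); // hypothetical codes
      String candidateConceptCode = "8480-6";
      boolean matches = enumeratedCodes.stream().anyMatch(candidateConceptCode::equals);
      System.out.println(matches ? "concept included" : "concept skipped");
   }
}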
|
||||
|
||||
private String buildCodeSystemUrlAndVersion(String theSystem, String theIncludeOrExcludeVersion) {
|
||||
String codeSystemUrlAndVersion;
|
||||
if (theIncludeOrExcludeVersion != null) {
|
||||
codeSystemUrlAndVersion = theSystem + "|" + theIncludeOrExcludeVersion;
|
||||
} else {
|
||||
codeSystemUrlAndVersion = theSystem;
|
||||
}
|
||||
return codeSystemUrlAndVersion;
|
||||
}
|
||||
|
||||
private @Nonnull
|
||||
ValueSetExpansionOptions provideExpansionOptions(@Nullable ValueSetExpansionOptions theExpansionOptions) {
|
||||
if (theExpansionOptions != null) {
|
||||
|
@ -1046,7 +1074,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
}
|
||||
}
|
||||
|
||||
private void handleFilter(String theCodeSystemIdentifier, QueryBuilder theQb, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
|
||||
private void handleFilter(String theCodeSystemIdentifier, SearchPredicateFactory theF, BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
|
||||
if (isBlank(theFilter.getValue()) && theFilter.getOp() == null && isBlank(theFilter.getProperty())) {
|
||||
return;
|
||||
}
|
||||
|
@ -1058,258 +1086,36 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
|
|||
switch (theFilter.getProperty()) {
|
||||
case "display:exact":
|
||||
case "display":
|
||||
handleFilterDisplay(theQb, theBool, theFilter);
|
||||
handleFilterDisplay(theF, theB, theFilter);
|
||||
break;
|
||||
case "concept":
|
||||
case "code":
|
||||
handleFilterConceptAndCode(theCodeSystemIdentifier, theQb, theBool, theFilter);
|
||||
handleFilterConceptAndCode(theCodeSystemIdentifier, theF, theB, theFilter);
|
||||
break;
|
||||
case "parent":
|
||||
case "child":
|
||||
isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty());
|
||||
handleFilterLoincParentChild(theBool, theFilter);
|
||||
handleFilterLoincParentChild(theF, theB, theFilter);
|
||||
break;
|
||||
case "ancestor":
|
||||
isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty());
|
||||
handleFilterLoincAncestor(theCodeSystemIdentifier, theBool, theFilter);
|
||||
handleFilterLoincAncestor2(theCodeSystemIdentifier, theF, theB, theFilter);
|
||||
break;
|
||||
case "descendant":
|
||||
isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty());
|
||||
handleFilterLoincDescendant(theCodeSystemIdentifier, theBool, theFilter);
|
||||
handleFilterLoincDescendant(theCodeSystemIdentifier, theF, theB, theFilter);
|
||||
break;
|
||||
case "copyright":
|
||||
isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty());
|
||||
handleFilterLoincCopyright(theBool, theFilter);
|
||||
handleFilterLoincCopyright(theF, theB, theFilter);
|
||||
break;
|
||||
default:
|
||||
handleFilterRegex(theBool, theFilter);
|
||||
handleFilterRegex(theF, theB, theFilter);
|
||||
break;
|
||||
}
|
||||
}
	private void isCodeSystemLoincOrThrowInvalidRequestException(String theSystemIdentifier, String theProperty) {
		String systemUrl = getUrlFromIdentifier(theSystemIdentifier);
		if (!isCodeSystemLoinc(systemUrl)) {
			throw new InvalidRequestException("Invalid filter, property " + theProperty + " is LOINC-specific and cannot be used with system: " + systemUrl);
		}
	}

	private boolean isCodeSystemLoinc(String theSystem) {
		return ITermLoaderSvc.LOINC_URI.equals(theSystem);
	}

	private void handleFilterDisplay(QueryBuilder theQb, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		if (theFilter.getProperty().equals("display:exact") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
			addDisplayFilterExact(theQb, theBool, theFilter);
		} else if (theFilter.getProperty().equals("display") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
			if (theFilter.getValue().trim().contains(" ")) {
				addDisplayFilterExact(theQb, theBool, theFilter);
			} else {
				addDisplayFilterInexact(theQb, theBool, theFilter);
			}
		}
	}

	private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
		bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery());
	}

	private void addDisplayFilterInexact(QueryBuilder qb, BooleanJunction<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
		Query textQuery = qb
			.phrase()
			.withSlop(2)
			.onField("myDisplay").boostedTo(4.0f)
			//.andField("myDisplayEdgeNGram").boostedTo(2.0f)
			.andField("myDisplayWordEdgeNGram").boostedTo(1.0f)
			// .andField("myDisplayPhonetic").boostedTo(0.5f)
			.sentence(nextFilter.getValue().toLowerCase()).createQuery();
		bool.must(textQuery);
	}
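
	// Migration sketch (illustrative, based on the HS6 code appearing later in
	// this diff): the Hibernate Search 5 phrase query above maps onto the HS6
	// predicate DSL roughly as follows. Per-field boostedTo(...) becomes
	// field(...).boost(...), withSlop(...) becomes slop(...), and the
	// createQuery()/bool.must(...) pair collapses into one must(...) call.
	private void addDisplayFilterInexactHs6Sketch(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, String theDisplayValue) {
		b.must(f.phrase()
			.field("myDisplay").boost(4.0f)
			.field("myDisplayWordEdgeNGram").boost(1.0f)
			.matching(theDisplayValue.toLowerCase())
			.slop(2));
	}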
	private void handleFilterConceptAndCode(String theSystem, QueryBuilder theQb, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		TermConcept code = findCode(theSystem, theFilter.getValue())
			.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theFilter.getValue()));

		if (theFilter.getOp() == ValueSet.FilterOperator.ISA) {
			ourLog.debug(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
			theBool.must(theQb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery());
		} else {
			throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
	private void handleFilterLoincParentChild(BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		switch (theFilter.getOp()) {
			case EQUAL:
				addLoincFilterParentChildEqual(theBool, theFilter.getProperty(), theFilter.getValue());
				break;
			case IN:
				addLoincFilterParentChildIn(theBool, theFilter);
				break;
			default:
				throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	private void addLoincFilterParentChildEqual(BooleanJunction<?> theBool, String theProperty, String theValue) {
		logFilteringValueOnProperty(theValue, theProperty);
		theBool.must(new TermsQuery(getPropertyTerm(theProperty, theValue)));
	}

	private void addLoincFilterParentChildIn(BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		String[] values = theFilter.getValue().split(",");
		List<Term> terms = new ArrayList<>();
		for (String value : values) {
			logFilteringValueOnProperty(value, theFilter.getProperty());
			terms.add(getPropertyTerm(theFilter.getProperty(), value));
		}
		theBool.must(new TermsQuery(terms));
	}

	private Term getPropertyTerm(String theProperty, String theValue) {
		return new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty, theValue);
	}
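
	// Orientation sketch (Lucene API of the HS5 era used above): a TermsQuery over
	// several terms is a disjunction, so wrapping it in bool.must(...) means
	// "must match at least one of these property values".
	private Query examplePropertyInQuery(String theProperty, List<String> theValues) {
		List<Term> terms = new ArrayList<>();
		for (String value : theValues) {
			terms.add(getPropertyTerm(theProperty, value));
		}
		return new TermsQuery(terms); // matches documents containing ANY of the terms
	}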
	@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
	private void handleFilterLoincAncestor(String theSystem, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		switch (theFilter.getOp()) {
			case EQUAL:
				addLoincFilterAncestorEqual(theSystem, theBool, theFilter);
				break;
			case IN:
				addLoincFilterAncestorIn(theSystem, theBool, theFilter);
				break;
			default:
				throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	private void addLoincFilterAncestorEqual(String theSystem, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		addLoincFilterAncestorEqual(theSystem, theBool, theFilter.getProperty(), theFilter.getValue());
	}

	private void addLoincFilterAncestorEqual(String theSystem, BooleanJunction<?> theBool, String theProperty, String theValue) {
		List<Term> terms = getAncestorTerms(theSystem, theProperty, theValue);
		theBool.must(new TermsQuery(terms));
	}

	private void addLoincFilterAncestorIn(String theSystem, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		String[] values = theFilter.getValue().split(",");
		List<Term> terms = new ArrayList<>();
		for (String value : values) {
			terms.addAll(getAncestorTerms(theSystem, theFilter.getProperty(), value));
		}
		theBool.must(new TermsQuery(terms));
	}

	private List<Term> getAncestorTerms(String theSystem, String theProperty, String theValue) {
		List<Term> retVal = new ArrayList<>();

		TermConcept code = findCode(theSystem, theValue)
			.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue));

		retVal.add(new Term("myParentPids", "" + code.getId()));
		logFilteringValueOnProperty(theValue, theProperty);

		return retVal;
	}

	@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
	private void handleFilterLoincDescendant(String theSystem, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		switch (theFilter.getOp()) {
			case EQUAL:
				addLoincFilterDescendantEqual(theSystem, theBool, theFilter);
				break;
			case IN:
				addLoincFilterDescendantIn(theSystem, theBool, theFilter);
				break;
			default:
				throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	private void addLoincFilterDescendantEqual(String theSystem, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		addLoincFilterDescendantEqual(theSystem, theBool, theFilter.getProperty(), theFilter.getValue());
	}

	private void addLoincFilterDescendantEqual(String theSystem, BooleanJunction<?> theBool, String theProperty, String theValue) {
		List<Term> terms = getDescendantTerms(theSystem, theProperty, theValue);
		theBool.must(new TermsQuery(terms));
	}

	private void addLoincFilterDescendantIn(String theSystem, BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		String[] values = theFilter.getValue().split(",");
		List<Term> terms = new ArrayList<>();
		for (String value : values) {
			terms.addAll(getDescendantTerms(theSystem, theFilter.getProperty(), value));
		}
		theBool.must(new TermsQuery(terms));
	}

	private List<Term> getDescendantTerms(String theSystem, String theProperty, String theValue) {
		List<Term> retVal = new ArrayList<>();

		TermConcept code = findCode(theSystem, theValue)
			.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue));

		String[] parentPids = code.getParentPidsAsString().split(" ");
		for (String parentPid : parentPids) {
			retVal.add(new Term("myId", parentPid));
		}
		logFilteringValueOnProperty(theValue, theProperty);

		return retVal;
	}
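
	// Reading note (sketch, assuming pids never contain spaces): the "descendant"
	// filter above resolves the target code, then matches any concept whose myId
	// appears in the target's space-separated parent-pid string, e.g.
	// "123 456".split(" ") yields terms myId:123 and myId:456.
	private static List<Term> exampleParentPidTerms(String theParentPidsString) {
		List<Term> terms = new ArrayList<>();
		for (String pid : theParentPidsString.split(" ")) {
			terms.add(new Term("myId", pid));
		}
		return terms;
	}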
	private void handleFilterLoincCopyright(BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
		if (theFilter.getOp() == ValueSet.FilterOperator.EQUAL) {

			String copyrightFilterValue = defaultString(theFilter.getValue()).toLowerCase();
			switch (copyrightFilterValue) {
				case "3rdparty":
					logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty());
					addFilterLoincCopyright3rdParty(theBool);
					break;
				case "loinc":
					logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty());
					addFilterLoincCopyrightLoinc(theBool);
					break;
				default:
					throwInvalidRequestForValueOnProperty(theFilter.getValue(), theFilter.getProperty());
			}

		} else {
			throwInvalidRequestForOpOnProperty(theFilter.getOp(), theFilter.getProperty());
		}
	}

	private void addFilterLoincCopyright3rdParty(BooleanJunction<?> theBool) {
		theBool.must(getRegexQueryForFilterLoincCopyright());
	}

	private void addFilterLoincCopyrightLoinc(BooleanJunction<?> theBool) {
		theBool.must(getRegexQueryForFilterLoincCopyright()).not();
	}

	private RegexpQuery getRegexQueryForFilterLoincCopyright() {
		Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE", ".*");
		return new RegexpQuery(term);
	}

	private void logFilteringValueOnProperty(String theValue, String theProperty) {
		ourLog.debug(" * Filtering with value={} on property {}", theValue, theProperty);
	}

	private void throwInvalidRequestForOpOnProperty(ValueSet.FilterOperator theOp, String theProperty) {
		throw new InvalidRequestException("Don't know how to handle op=" + theOp + " on property " + theProperty);
	}

	private void throwInvalidRequestForValueOnProperty(String theValue, String theProperty) {
		throw new InvalidRequestException("Don't know how to handle value=" + theValue + " on property " + theProperty);
	}

	private void handleFilterRegex(BooleanJunction<?> theBool, ValueSet.ConceptSetFilterComponent theFilter) {
	private void handleFilterRegex(SearchPredicateFactory theF, BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
		if (theFilter.getOp() == ValueSet.FilterOperator.REGEX) {

			/*
@@ -1330,19 +1136,274 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
				value = value.substring(1);
			}

			Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value);
			RegexpQuery query = new RegexpQuery(term);
			theBool.must(query);
			Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value);

			if (isFullTextSetToUseElastic()) {
				String regexpQuery = "{'regexp':{'" + term.field() + "':{'value':'" + term.text() + "'}}}";
				ourLog.debug("Build Elasticsearch Regexp Query: {}", regexpQuery);
				theB.must(theF.extension(ElasticsearchExtension.get()).fromJson(regexpQuery));
			} else {
				RegexpQuery query = new RegexpQuery(term);
				theB.must(theF.extension(LuceneExtension.get()).fromLuceneQuery(query));
			}
		} else {

			String value = theFilter.getValue();
			Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value);
			theBool.must(new TermsQuery(term));
			Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value);
			theB.must(theF.match().field(term.field()).matching(term.text()));

		}
	}
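
	// Backend-split sketch (assumes the isFullTextSetToUseElastic() helper used
	// above): HS6 has no portable regexp predicate, so the code drops to the
	// Elasticsearch JSON DSL on one backend and to a raw Lucene query on the
	// other. The same shape can be reused for any predicate with no portable
	// equivalent.
	private void exampleRegexpPredicate(SearchPredicateFactory theF, BooleanPredicateClausesStep<?> theB, String theField, String theRegex) {
		if (isFullTextSetToUseElastic()) {
			// Elasticsearch regexp query expressed as raw JSON
			theB.must(theF.extension(ElasticsearchExtension.get()).fromJson("{'regexp':{'" + theField + "':{'value':'" + theRegex + "'}}}"));
		} else {
			// Lucene backend: hand HS6 a native Lucene query
			theB.must(theF.extension(LuceneExtension.get()).fromLuceneQuery(new RegexpQuery(new Term(theField, theRegex))));
		}
	}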
	private void handleFilterLoincCopyright(SearchPredicateFactory theF, BooleanPredicateClausesStep<?> theB, ValueSet.ConceptSetFilterComponent theFilter) {
		if (theFilter.getOp() == ValueSet.FilterOperator.EQUAL) {

			String copyrightFilterValue = defaultString(theFilter.getValue()).toLowerCase();
			switch (copyrightFilterValue) {
				case "3rdparty":
					logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty());
					addFilterLoincCopyright3rdParty(theF, theB);
					break;
				case "loinc":
					logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty());
					addFilterLoincCopyrightLoinc(theF, theB);
					break;
				default:
					throwInvalidRequestForValueOnProperty(theFilter.getValue(), theFilter.getProperty());
			}

		} else {
			throwInvalidRequestForOpOnProperty(theFilter.getOp(), theFilter.getProperty());
		}
	}

	private void addFilterLoincCopyrightLoinc(SearchPredicateFactory thePredicateFactory, BooleanPredicateClausesStep<?> theBooleanClause) {
		theBooleanClause.mustNot(thePredicateFactory.exists().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE"));
	}

	private void addFilterLoincCopyright3rdParty(SearchPredicateFactory thePredicateFactory, BooleanPredicateClausesStep<?> theBooleanClause) {
		//TODO GGG HS: This used to be Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE", ".*");, which was Lucene-specific.
		//TODO GGG HS: Ask diederik if this is equivalent.
		//The old .* regex is the same as an existence check on a field, which is what is implemented here.
		theBooleanClause.must(thePredicateFactory.exists().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE"));
	}
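
	// Equivalence sketch for the TODOs above: a Lucene RegexpQuery on ".*" matches
	// any document with at least one indexed token in the field, which is what the
	// HS6 exists() predicate states directly; the LOINC case is then its negation.
	// Caveat (assumption): exists() and ".*" only coincide for ordinary non-empty
	// copyright notices.
	private void exampleCopyrightPredicate(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, boolean theThirdParty) {
		String field = TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE";
		if (theThirdParty) {
			b.must(f.exists().field(field));    // has an external notice -> 3rd party content
		} else {
			b.mustNot(f.exists().field(field)); // no external notice -> LOINC's own content
		}
	}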
	private void handleFilterLoincAncestor2(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		switch (theFilter.getOp()) {
			case EQUAL:
				addLoincFilterAncestorEqual(theSystem, f, b, theFilter);
				break;
			case IN:
				addLoincFilterAncestorIn(theSystem, f, b, theFilter);
				break;
			default:
				throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		addLoincFilterAncestorEqual(theSystem, f, b, theFilter.getProperty(), theFilter.getValue());
	}

	private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, String theProperty, String theValue) {
		List<Term> terms = getAncestorTerms(theSystem, theProperty, theValue);
		b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text())))));
	}

	private void addLoincFilterAncestorIn(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		String[] values = theFilter.getValue().split(",");
		List<Term> terms = new ArrayList<>();
		for (String value : values) {
			terms.addAll(getAncestorTerms(theSystem, theFilter.getProperty(), value));
		}
		b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text())))));
	}
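
	// Design note (sketch): the must(bool(should...)) lambda above recurs in
	// several of these methods. An inner bool holding only should-clauses matches
	// when at least one clause matches, so the shape reads as must(t1 OR t2 OR ...),
	// the HS6 stand-in for the removed TermsQuery. It could be factored out once:
	private void mustMatchAnyTerm(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, List<Term> theTerms) {
		// inner bool with only should-clauses == disjunction over the terms
		b.must(f.bool(innerB -> theTerms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text())))));
	}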
	private void handleFilterLoincParentChild(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		switch (theFilter.getOp()) {
			case EQUAL:
				addLoincFilterParentChildEqual(f, b, theFilter.getProperty(), theFilter.getValue());
				break;
			case IN:
				addLoincFilterParentChildIn(f, b, theFilter);
				break;
			default:
				throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	private void addLoincFilterParentChildIn(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		String[] values = theFilter.getValue().split(",");
		List<Term> terms = new ArrayList<>();
		for (String value : values) {
			logFilteringValueOnProperty(value, theFilter.getProperty());
			terms.add(getPropertyTerm(theFilter.getProperty(), value));
		}

		//TODO GGG HS: Not sure if this is the right equivalent... there seems to be no equivalent to `TermsQuery` in HS6.
		//As far as I'm aware, this is a single element of a MUST portion of a bool, which itself should contain a list of OR'ed options, e.g.
		// shape == round && color == (green || blue)
		b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text())))));
	}

	private void addLoincFilterParentChildEqual(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, String theProperty, String theValue) {
		logFilteringValueOnProperty(theValue, theProperty);
		//TODO GGG HS: Not sure if this is the right equivalent... there seems to be no equivalent to `TermsQuery` in HS6.
		//b.must(new TermsQuery(getPropertyTerm(theProperty, theValue)));
		//According to the DSL migration reference (https://docs.jboss.org/hibernate/search/6.0/migration/html_single/#queries-reference),
		//since this property is handled with a specific analyzer, I'm not sure a terms match here is actually correct. The analyzer is literally just a whitespace tokenizer here.
		b.must(f.match().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty).matching(theValue));
	}
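
	// Illustration of the analyzer concern in the TODO above (assumption: the
	// property field is tokenized by whitespace only). A match predicate analyzes
	// its input, so a multi-token stored value matches per token:
	//   stored "LP123-4 LP567-8" -> tokens [LP123-4, LP567-8]
	//   matching("LP123-4")      -> hits, because one token matches
	// The old single Lucene Term would only have matched the exact un-analyzed value.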
	private void handleFilterConceptAndCode(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		TermConcept code = findCode(theSystem, theFilter.getValue())
			.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theFilter.getValue()));

		if (theFilter.getOp() == ValueSet.FilterOperator.ISA) {
			ourLog.debug(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay());
			b.must(f.match().field("myParentPids").matching("" + code.getId()));
		} else {
			throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	private void isCodeSystemLoincOrThrowInvalidRequestException(String theSystemIdentifier, String theProperty) {
		String systemUrl = getUrlFromIdentifier(theSystemIdentifier);
		if (!isCodeSystemLoinc(systemUrl)) {
			throw new InvalidRequestException("Invalid filter, property " + theProperty + " is LOINC-specific and cannot be used with system: " + systemUrl);
		}
	}

	private boolean isCodeSystemLoinc(String theSystem) {
		return ITermLoaderSvc.LOINC_URI.equals(theSystem);
	}

	private void handleFilterDisplay(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		if (theFilter.getProperty().equals("display:exact") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
			addDisplayFilterExact(f, b, theFilter);
		} else if (theFilter.getProperty().equals("display") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) {
			if (theFilter.getValue().trim().contains(" ")) {
				addDisplayFilterExact(f, b, theFilter);
			} else {
				addDisplayFilterInexact(f, b, theFilter);
			}
		}
	}

	private void addDisplayFilterExact(SearchPredicateFactory f, BooleanPredicateClausesStep<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
		bool.must(f.phrase().field("myDisplay").matching(nextFilter.getValue()));
	}

	private void addDisplayFilterInexact(SearchPredicateFactory f, BooleanPredicateClausesStep<?> bool, ValueSet.ConceptSetFilterComponent nextFilter) {
		bool.must(f.phrase()
			.field("myDisplay").boost(4.0f)
			.field("myDisplayWordEdgeNGram").boost(1.0f)
			.field("myDisplayEdgeNGram").boost(1.0f)
			.matching(nextFilter.getValue().toLowerCase())
			.slop(2)
		);
	}
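
	// End-to-end usage sketch (assumes an injected EntityManager; the field names
	// are the ones indexed on TermConcept in this diff):
	private List<TermConcept> exampleDisplaySearch(EntityManager theEntityManager, String theDisplay) {
		SearchSession session = Search.session(theEntityManager);
		return session.search(TermConcept.class)
			.where(f -> f.phrase().field("myDisplay").matching(theDisplay.toLowerCase()).slop(2))
			.fetchHits(10); // first 10 matching concepts
	}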
	private Term getPropertyTerm(String theProperty, String theValue) {
		return new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty, theValue);
	}

	private List<Term> getAncestorTerms(String theSystem, String theProperty, String theValue) {
		List<Term> retVal = new ArrayList<>();

		TermConcept code = findCode(theSystem, theValue)
			.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue));

		retVal.add(new Term("myParentPids", "" + code.getId()));
		logFilteringValueOnProperty(theValue, theProperty);

		return retVal;
	}

	@SuppressWarnings("EnumSwitchStatementWhichMissesCases")
	private void handleFilterLoincDescendant(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		switch (theFilter.getOp()) {
			case EQUAL:
				addLoincFilterDescendantEqual(theSystem, f, b, theFilter);
				break;
			case IN:
				addLoincFilterDescendantIn(theSystem, f, b, theFilter);
				break;
			default:
				throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty());
		}
	}

	private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		addLoincFilterDescendantEqual(theSystem, f, b, theFilter.getProperty(), theFilter.getValue());
	}

	private void addLoincFilterDescendantIn(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, ValueSet.ConceptSetFilterComponent theFilter) {
		String[] values = theFilter.getValue().split(",");
		List<Term> terms = new ArrayList<>();
		for (String value : values) {
			terms.addAll(getDescendantTerms(theSystem, theFilter.getProperty(), value));
		}
		searchByParentPids(f, b, terms);
	}

	private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, String theProperty, String theValue) {
		List<Term> terms = getDescendantTerms(theSystem, theProperty, theValue);
		searchByParentPids(f, b, terms);
	}

	private void searchByParentPids(SearchPredicateFactory f, BooleanPredicateClausesStep<?> b, List<Term> theTerms) {
		List<Long> parentPids = convertTermsToParentPids(theTerms);
		b.must(f.bool(innerB -> {
			parentPids.forEach(pid -> innerB.should(f.match().field(theTerms.get(0).field()).matching(pid)));
		}));
	}

	private List<Long> convertTermsToParentPids(List<Term> theTerms) {
		return theTerms.stream().map(Term::text).map(Long::valueOf).collect(Collectors.toList());
	}
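
	// Type note (sketch): matching(pid) above passes a Long where the sibling
	// methods match the same fields with strings. HS6 runs the argument through
	// the field's DSL converter, so this relies on myId/myParentPids being indexed
	// with a compatible type; otherwise matching(Long.toString(pid)), the form
	// used by testForSubsumption() later in this diff, would be the safer call.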
	private List<Term> getDescendantTerms(String theSystem, String theProperty, String theValue) {
		List<Term> retVal = new ArrayList<>();

		TermConcept code = findCode(theSystem, theValue)
			.orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue));

		String[] parentPids = code.getParentPidsAsString().split(" ");
		for (String parentPid : parentPids) {
			if (!StringUtils.equals(parentPid, "NONE")) {
				retVal.add(new Term("myId", parentPid));
			}
		}
		logFilteringValueOnProperty(theValue, theProperty);

		return retVal;
	}

	private void logFilteringValueOnProperty(String theValue, String theProperty) {
		ourLog.debug(" * Filtering with value={} on property {}", theValue, theProperty);
	}

	private void throwInvalidRequestForOpOnProperty(ValueSet.FilterOperator theOp, String theProperty) {
		throw new InvalidRequestException("Don't know how to handle op=" + theOp + " on property " + theProperty);
	}

	private void throwInvalidRequestForValueOnProperty(String theValue, String theProperty) {
		throw new InvalidRequestException("Don't know how to handle value=" + theValue + " on property " + theProperty);
	}

	private void expandWithoutHibernateSearch(IValueSetConceptAccumulator theValueSetCodeAccumulator, TermCodeSystemVersion theVersion, Set<String> theAddedCodes, ValueSet.ConceptSetComponent theInclude, String theSystem, boolean theAdd) {
		ourLog.trace("Hibernate search is not enabled");
@@ -1665,7 +1726,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
	}

	public void scheduleJob() {
		// TODO KHS what does this mean?
		// Register scheduled job to pre-expand ValueSets
		// In the future it would be great to make this a cluster-aware task somehow
		ScheduledJobDefinition vsJobDefinition = new ScheduledJobDefinition();

@@ -2054,12 +2114,12 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
		TermConcept codeB = findCode(codeBSystemIdentifier, conceptB.getCode())
			.orElseThrow(() -> new InvalidRequestException("Unknown code: " + conceptB));

		FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager);
		SearchSession searchSession = Search.session(myEntityManager);

		ConceptSubsumptionOutcome subsumes;
		subsumes = testForSubsumption(em, codeA, codeB, ConceptSubsumptionOutcome.SUBSUMES);
		subsumes = testForSubsumption(searchSession, codeA, codeB, ConceptSubsumptionOutcome.SUBSUMES);
		if (subsumes == null) {
			subsumes = testForSubsumption(em, codeB, codeA, ConceptSubsumptionOutcome.SUBSUMEDBY);
			subsumes = testForSubsumption(searchSession, codeB, codeA, ConceptSubsumptionOutcome.SUBSUMEDBY);
		}
		if (subsumes == null) {
			subsumes = ConceptSubsumptionOutcome.NOTSUBSUMED;
@@ -2116,20 +2176,21 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc {
	}

	@Nullable
	private ConceptSubsumptionOutcome testForSubsumption(FullTextEntityManager theEntityManager, TermConcept theLeft, TermConcept theRight, ConceptSubsumptionOutcome theOutput) {
		QueryBuilder qb = theEntityManager.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get();
		BooleanJunction<?> bool = qb.bool();
		bool.must(qb.keyword().onField("myId").matching(Long.toString(theRight.getId())).createQuery());
		bool.must(qb.keyword().onField("myParentPids").matching(Long.toString(theLeft.getId())).createQuery());
		Query luceneQuery = bool.createQuery();
		FullTextQuery jpaQuery = theEntityManager.createFullTextQuery(luceneQuery, TermConcept.class);
		jpaQuery.setMaxResults(1);
		if (jpaQuery.getResultList().size() > 0) {
	private ConceptSubsumptionOutcome testForSubsumption(SearchSession theSearchSession, TermConcept theLeft, TermConcept theRight, ConceptSubsumptionOutcome theOutput) {
		List<TermConcept> fetch = theSearchSession.search(TermConcept.class)
			.where(f -> f.bool()
				.must(f.match().field("myId").matching(theRight.getId()))
				.must(f.match().field("myParentPids").matching(Long.toString(theLeft.getId())))
			).fetchHits(1);

		if (fetch.size() > 0) {
			return theOutput;
		} else {
			return null;
		}
		return null;
	}
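
	// Usage sketch for the HS6 session above: subsumption reduces to "is theRight
	// indexed with theLeft's pid among its myParentPids", and the caller tries
	// both directions, mirroring the call sites earlier in this diff:
	//   subsumes = testForSubsumption(searchSession, codeA, codeB, ConceptSubsumptionOutcome.SUBSUMES);
	//   if (subsumes == null) subsumes = testForSubsumption(searchSession, codeB, codeA, ConceptSubsumptionOutcome.SUBSUMEDBY);
	//   if (subsumes == null) subsumes = ConceptSubsumptionOutcome.NOTSUBSUMED;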
	private ArrayList<FhirVersionIndependentConcept> toVersionIndependentConcepts(String theSystem, Set<TermConcept> codes) {
		ArrayList<FhirVersionIndependentConcept> retVal = new ArrayList<>(codes.size());
		for (TermConcept next : codes) {

@@ -25,7 +25,6 @@ import ca.uhn.fhir.context.FhirVersionEnum;
import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.IHapiJpaRepository;
import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;

@@ -52,7 +51,6 @@ import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
import ca.uhn.fhir.util.ObjectUtil;
import ca.uhn.fhir.util.StopWatch;
import ca.uhn.fhir.util.ValidateUtil;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -62,19 +60,17 @@ import org.hl7.fhir.r4.model.ValueSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionTemplate;
import org.springframework.transaction.support.TransactionSynchronizationManager;

import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.persistence.PersistenceContextType;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;

@@ -83,13 +79,16 @@ import java.util.HashSet;
import java.util.IdentityHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;
import java.util.stream.Collectors;

import static org.apache.commons.lang3.StringUtils.defaultIfBlank;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
@@ -183,57 +182,116 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
		if (cs == null) {
			throw new InvalidRequestException("Unknown code system: " + theSystem);
		}
		IIdType target = cs.getResource().getIdDt();

		AtomicInteger removeCounter = new AtomicInteger(0);

		for (TermConcept nextSuppliedConcept : theValue.getRootConcepts()) {
			Optional<TermConcept> conceptOpt = myTerminologySvc.findCode(theSystem, nextSuppliedConcept.getCode());
			if (conceptOpt.isPresent()) {
				TermConcept concept = conceptOpt.get();
				deleteConceptChildrenAndConcept(concept, removeCounter);
			}
		//We need to delete all TermConcepts and their children. This stream flattens the TermConcepts and their
		//children into a single set of TermConcept objects retrieved from the DB. Note that we have to do this because
		//deleteById() in JPA doesn't appear to actually commit or flush a transaction until much later, and we end up
		//iterating multiple times over the same elements, which throws off our counter.

		//Grab the actual entities
		List<TermConcept> collect = theValue.getRootConcepts().stream()
			.map(val -> myTerminologySvc.findCode(theSystem, val.getCode()))
			.filter(Optional::isPresent)
			.map(Optional::get)
			.collect(Collectors.toList());

		//Iterate over the actual entities and fill out their children
		Set<TermConcept> allFoundTermConcepts = collect
			.stream()
			.flatMap(concept -> flattenChildren(concept).stream())
			.map(suppliedTermConcept -> myTerminologySvc.findCode(theSystem, suppliedTermConcept.getCode()))
			.filter(Optional::isPresent)
			.map(Optional::get)
			.collect(Collectors.toSet());

		//Delete everything about these codes.
		for (TermConcept code : allFoundTermConcepts) {
			deleteEverythingRelatedToConcept(code, removeCounter);
		}

		IIdType target = cs.getResource().getIdDt();
		return new UploadStatistics(removeCounter.get(), target);
	}
	private void deleteEverythingRelatedToConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) {

		for (TermConceptParentChildLink nextParent : theConcept.getParents()) {
			nextParent.getParent().getChildren().remove(nextParent);
			myConceptParentChildLinkDao.deleteById(nextParent.getId());
		}
		for (TermConceptParentChildLink nextChild : theConcept.getChildren()) {
			nextChild.getChild().getParents().remove(nextChild);
			myConceptParentChildLinkDao.deleteById(nextChild.getId());
		}

		for (TermConceptDesignation next : theConcept.getDesignations()) {
			myConceptDesignationDao.deleteById(next.getPid());
		}
		theConcept.getDesignations().clear();
		for (TermConceptProperty next : theConcept.getProperties()) {
			myConceptPropertyDao.deleteById(next.getPid());
		}
		theConcept.getProperties().clear();

		ourLog.info("Deleting concept {} - Code {}", theConcept.getId(), theConcept.getCode());

		myConceptDao.deleteById(theConcept.getId());
		// myEntityManager.remove(theConcept);

		theRemoveCounter.incrementAndGet();
	}

	private List<TermConcept> flattenChildren(TermConcept theTermConcept) {
		if (theTermConcept.getChildren().isEmpty()) {
			return Arrays.asList(theTermConcept);
		}

		//Recursively flatten children
		List<TermConcept> childTermConcepts = theTermConcept.getChildren().stream()
			.map(TermConceptParentChildLink::getChild)
			.flatMap(childConcept -> flattenChildren(childConcept).stream())
			.collect(Collectors.toList());

		//Add itself before its list of children
		childTermConcepts.add(0, theTermConcept);
		return childTermConcepts;
	}
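
	// Worked trace (sketch) of flattenChildren() for a three-node tree A -> {B, C}:
	// B and C each flatten to themselves, then A is prepended, giving [A, B, C].
	// A parent therefore always precedes its descendants in the flattened list
	// that the delete loop above consumes.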
	@Override
	@Transactional(propagation = Propagation.NEVER)
	@Transactional
	public void deleteCodeSystem(TermCodeSystem theCodeSystem) {
		assert TransactionSynchronizationManager.isActualTransactionActive();

		ourLog.info(" * Deleting code system {}", theCodeSystem.getPid());

		TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
		txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);
		txTemplate.executeWithoutResult(t -> {
			myEntityManager.flush();
			TermCodeSystem cs = myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new);
			cs.setCurrentVersion(null);
			myCodeSystemDao.save(cs);
			myCodeSystemDao.flush();
		});
		myEntityManager.flush();
		TermCodeSystem cs = myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new);
		cs.setCurrentVersion(null);
		myCodeSystemDao.save(cs);
		myCodeSystemDao.flush();

		List<Long> codeSystemVersionPids = txTemplate.execute(t -> {
			List<TermCodeSystemVersion> codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid());
			return codeSystemVersions
				.stream()
				.map(v -> v.getPid())
				.collect(Collectors.toList());
		});
		List<TermCodeSystemVersion> codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid());
		List<Long> codeSystemVersionPids = codeSystemVersions
			.stream()
			.map(TermCodeSystemVersion::getPid)
			.collect(Collectors.toList());
		for (Long next : codeSystemVersionPids) {
			deleteCodeSystemVersion(next);
		}

		txTemplate.executeWithoutResult(t -> {
			myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
			myCodeSystemDao.delete(theCodeSystem);
			myEntityManager.flush();
		});
		myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem);
		myCodeSystemDao.delete(theCodeSystem);
		myEntityManager.flush();
	}

	@Override
	@Transactional(propagation = Propagation.NEVER)
	public void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
		assert !TransactionSynchronizationManager.isActualTransactionActive();

		// Delete TermCodeSystemVersion
		ourLog.info(" * Deleting TermCodeSystemVersion {}", theCodeSystemVersion.getCodeSystemVersionId());
		deleteCodeSystemVersion(theCodeSystemVersion.getPid());
@@ -322,11 +380,15 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
	}

	@Override
	@Transactional(propagation = Propagation.REQUIRED)
	@Transactional
	public IIdType storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequest, List<ValueSet> theValueSets, List<ConceptMap> theConceptMaps) {
		assert TransactionSynchronizationManager.isActualTransactionActive();

		Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL");

		// Note that this creates the TermCodeSystem and TermCodeSystemVersion entities if needed
		IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource);

		ResourcePersistentId codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart());
		ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid.getIdAsLong());

@@ -343,35 +405,52 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
	}

	@Override
	@Transactional(propagation = Propagation.REQUIRED)
	public void storeNewCodeSystemVersion(ResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) {
	@Transactional
	public void storeNewCodeSystemVersion(ResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theCodeSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) {
		assert TransactionSynchronizationManager.isActualTransactionActive();

		ourLog.debug("Storing code system");

		ValidateUtil.isTrueOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied");
		TermCodeSystemVersion codeSystemToStore = theCodeSystemVersion;
		ValidateUtil.isTrueOrThrowInvalidRequest(codeSystemToStore.getResource() != null, "No resource supplied");
		ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied");

		// Grab the existing versions so we can delete them
		TermCodeSystem codeSystem = getOrCreateDistinctTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theCodeSystemVersionId, theCodeSystemResourceTable);

		List<TermCodeSystemVersion> existing = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemResourcePid.getIdAsLong());

		/*
		 * Delete the version being replaced.
		 */

		for (TermCodeSystemVersion next : existing) {
			ourLog.info("Deleting old code system version {}", next.getPid());
			Long codeSystemVersionPid = next.getPid();
			deleteCodeSystemVersion(codeSystemVersionPid);
			if (Objects.equals(next.getCodeSystemVersionId(), theCodeSystemVersionId) && myConceptDao.countByCodeSystemVersion(next.getPid()) == 0) {

				/*
				 * If we already have a CodeSystemVersion that matches the version we're storing, we
				 * can reuse it.
				 */
				next.setCodeSystemDisplayName(theSystemName);
				codeSystemToStore = next;

			} else {

				/*
				 * If we already have a TermCodeSystemVersion that corresponds to the FHIR Resource ID we're
				 * adding a version to, we will mark it for deletion. For any one resource there can only
				 * be one TermCodeSystemVersion entity in the DB. Multiple versions of a code system use
				 * multiple CodeSystem resources with CodeSystem.version set differently (as opposed to
				 * multiple versions of the same CodeSystem, where CodeSystem.meta.versionId is different).
				 */
				next.setCodeSystemVersionId("DELETED_" + UUID.randomUUID().toString());
				myCodeSystemVersionDao.saveAndFlush(next);
				myDeferredStorageSvc.deleteCodeSystemVersion(next);

			}
		}

		/*
		 * Do the upload
		 */

		TermCodeSystem codeSystem = getOrCreateDistinctTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theSystemVersionId, theCodeSystemResourceTable);
		theCodeSystemVersion.setCodeSystem(codeSystem);

		theCodeSystemVersion.setCodeSystemDisplayName(theSystemName);
		theCodeSystemVersion.setCodeSystemVersionId(theSystemVersionId);
		codeSystemToStore.setCodeSystem(codeSystem);
		codeSystemToStore.setCodeSystemDisplayName(theSystemName);
		codeSystemToStore.setCodeSystemVersionId(theCodeSystemVersionId);

		ourLog.debug("Validating all codes in CodeSystem for storage (this can take some time for large sets)");
@@ -379,40 +458,42 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
		ArrayList<String> conceptsStack = new ArrayList<>();
		IdentityHashMap<TermConcept, Object> allConcepts = new IdentityHashMap<>();
		int totalCodeCount = 0;
		for (TermConcept next : theCodeSystemVersion.getConcepts()) {
			totalCodeCount += validateConceptForStorage(next, theCodeSystemVersion, conceptsStack, allConcepts);
		Collection<TermConcept> conceptsToSave = theCodeSystemVersion.getConcepts();
		for (TermConcept next : conceptsToSave) {
			totalCodeCount += validateConceptForStorage(next, codeSystemToStore, conceptsStack, allConcepts);
		}

		ourLog.debug("Saving version containing {} concepts", totalCodeCount);

		TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.saveAndFlush(theCodeSystemVersion);
		if (codeSystemToStore.getPid() == null) {
			codeSystemToStore = myCodeSystemVersionDao.saveAndFlush(codeSystemToStore);
		}

		ourLog.debug("Saving code system");

		codeSystem.setCurrentVersion(theCodeSystemVersion);
		codeSystem = myCodeSystemDao.saveAndFlush(codeSystem);
		codeSystem.setCurrentVersion(codeSystemToStore);
		if (codeSystem.getPid() == null) {
			codeSystem = myCodeSystemDao.saveAndFlush(codeSystem);
		}

		ourLog.debug("Setting CodeSystemVersion[{}] on {} concepts...", codeSystem.getPid(), totalCodeCount);

		for (TermConcept next : theCodeSystemVersion.getConcepts()) {
			populateVersion(next, codeSystemVersion);
		for (TermConcept next : conceptsToSave) {
			populateVersion(next, codeSystemToStore);
		}

		ourLog.debug("Saving {} concepts...", totalCodeCount);

		IdentityHashMap<TermConcept, Object> conceptsStack2 = new IdentityHashMap<>();
		for (TermConcept next : theCodeSystemVersion.getConcepts()) {
			persistChildren(next, codeSystemVersion, conceptsStack2, totalCodeCount);
		for (TermConcept next : conceptsToSave) {
			persistChildren(next, codeSystemToStore, conceptsStack2, totalCodeCount);
		}

		ourLog.debug("Done saving concepts, flushing to database");

		myConceptDao.flush();
		myConceptParentChildLinkDao.flush();

		if (!myDeferredStorageSvc.isStorageQueueEmpty()) {
			ourLog.info("Note that some concept saving has been deferred");
		}

	}

	private TermCodeSystemVersion getExistingTermCodeSystemVersion(Long theCodeSystemVersionPid, String theCodeSystemVersion) {
@@ -427,60 +508,24 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
	}

	private void deleteCodeSystemVersion(final Long theCodeSystemVersionPid) {
		ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid);
		assert TransactionSynchronizationManager.isActualTransactionActive();
		ourLog.info(" * Marking code system version {} for deletion", theCodeSystemVersionPid);

		PageRequest page1000 = PageRequest.of(0, 1000);

		// Parent/Child links
		{
			String descriptor = "parent/child links";
			Supplier<Slice<Long>> loader = () -> myConceptParentChildLinkDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
			Supplier<Integer> counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid);
			doDelete(descriptor, loader, counter, myConceptParentChildLinkDao);
		}

		// Properties
		{
			String descriptor = "concept properties";
			Supplier<Slice<Long>> loader = () -> myConceptPropertyDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
			Supplier<Integer> counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid);
			doDelete(descriptor, loader, counter, myConceptPropertyDao);
		}

		// Designations
		{
			String descriptor = "concept designations";
			Supplier<Slice<Long>> loader = () -> myConceptDesignationDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
			Supplier<Integer> counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid);
			doDelete(descriptor, loader, counter, myConceptDesignationDao);
		}

		// Concepts
		{
			String descriptor = "concepts";
			// For some reason, concepts are much slower to delete, so use a smaller batch size
			PageRequest page100 = PageRequest.of(0, 100);
			Supplier<Slice<Long>> loader = () -> myConceptDao.findIdsByCodeSystemVersion(page100, theCodeSystemVersionPid);
			Supplier<Integer> counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid);
			doDelete(descriptor, loader, counter, myConceptDao);
		}

		TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
		txTemplate.executeWithoutResult(tx -> {
			Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
			if (codeSystemOpt.isPresent()) {
				TermCodeSystem codeSystem = codeSystemOpt.get();
		Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
		if (codeSystemOpt.isPresent()) {
			TermCodeSystem codeSystem = codeSystemOpt.get();
			if (codeSystem.getCurrentVersion() != null && codeSystem.getCurrentVersion().getPid().equals(theCodeSystemVersionPid)) {
				ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
				codeSystem.setCurrentVersion(null);
				myCodeSystemDao.save(codeSystem);
				myCodeSystemDao.flush();
			}
		}

		ourLog.info(" * Deleting code system version");
		myCodeSystemVersionDao.delete(theCodeSystemVersionPid);
		myCodeSystemVersionDao.flush();
		});
		TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.findById(theCodeSystemVersionPid).orElseThrow(() -> new IllegalStateException());
		codeSystemVersion.setCodeSystemVersionId("DELETED_" + UUID.randomUUID().toString());
		myCodeSystemVersionDao.save(codeSystemVersion);

		myDeferredStorageSvc.deleteCodeSystemVersion(codeSystemVersion);
	}

	private void validateDstu3OrNewer() {
@@ -635,13 +680,12 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
	}

	private void populateVersion(TermConcept theNext, TermCodeSystemVersion theCodeSystemVersion) {
		if (theNext.getCodeSystemVersion() != null) {
			return;
		}
		theNext.setCodeSystemVersion(theCodeSystemVersion);
		for (TermConceptParentChildLink next : theNext.getChildren()) {
			populateVersion(next.getChild(), theCodeSystemVersion);
		}
		theNext.getProperties().forEach(t -> t.setCodeSystemVersion(theCodeSystemVersion));
		theNext.getDesignations().forEach(t -> t.setCodeSystemVersion(theCodeSystemVersion));
	}

	private void saveConceptLink(TermConceptParentChildLink next) {

@@ -702,7 +746,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
		// Check if a TermCodeSystemVersion entity already exists for this TermCodeSystem and version.
		codeSystemVersionEntity = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCodeSystem.getPid(), theSystemVersionId);
		if (codeSystemVersionEntity != null) {
			msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "cannotCreateDuplicateCodeSystemUrlAndVersion", theSystemUri, theSystemVersionId, codeSystemVersionEntity.getResource().getIdDt().toUnqualifiedVersionless().getValue());
		}
	}
	// Throw exception if the TermCodeSystemVersion is being duplicated.
@@ -719,58 +763,13 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
		theCodeSystemVersion.setCodeSystemVersionId(theCodeSystemResource.getVersion());
	}

	private void deleteConceptChildrenAndConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) {
		for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) {
			deleteConceptChildrenAndConcept(nextChildLink.getChild(), theRemoveCounter);
		}

		myConceptParentChildLinkDao.deleteByConceptPid(theConcept.getId());

		myConceptDesignationDao.deleteAll(theConcept.getDesignations());
		myConceptPropertyDao.deleteAll(theConcept.getProperties());

		ourLog.info("Deleting concept {} - Code {}", theConcept.getId(), theConcept.getCode());
		myConceptDao.deleteByPid(theConcept.getId());
		theRemoveCounter.incrementAndGet();
	}

	@SuppressWarnings("ConstantConditions")
	private <T> void doDelete(String theDescriptor, Supplier<Slice<Long>> theLoader, Supplier<Integer> theCounter, IHapiJpaRepository<T> theDao) {
		TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager);
		txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED);

		int count;
		ourLog.info(" * Deleting {}", theDescriptor);
		int totalCount = txTemplate.execute(t -> theCounter.get());
		StopWatch sw = new StopWatch();
		count = 0;
		while (true) {
			Slice<Long> link = txTemplate.execute(t -> theLoader.get());
			if (!link.hasContent()) {
				break;
			}

			txTemplate.execute(t -> {
				link.forEach(id -> theDao.deleteByPid(id));
				theDao.flush();
				return null;
			});

			count += link.getNumberOfElements();
			ourLog.info(" * {} {} deleted ({}/{}) remaining - {}/sec - ETA: {}", count, theDescriptor, count, totalCount, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
		}
	}
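
	// Usage sketch (hypothetical pid variable): doDelete() drains a paged id
	// stream in its own transactions, so a caller supplies only the loader, the
	// counter, and the repository:
	//   PageRequest page1000 = PageRequest.of(0, 1000);
	//   doDelete("concept properties",
	//      () -> myConceptPropertyDao.findIdsByCodeSystemVersion(page1000, thePid),
	//      () -> myConceptPropertyDao.countByCodeSystemVersion(thePid),
	//      myConceptPropertyDao);
	// Because the loader always requests page 0, each pass re-reads the first page
	// of whatever ids remain after the previous batch was deleted.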
	private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, ArrayList<String> theConceptsStack,
	private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystemVersion, ArrayList<String> theConceptsStack,
													  IdentityHashMap<TermConcept, Object> theAllConcepts) {
		ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() != null, "CodeSystemVersion is null");
		ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() == theCodeSystem, "CodeSystems are not equal");
		ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "CodeSystem contains a code with no code value");

		theConcept.setCodeSystemVersion(theCodeSystemVersion);
		if (theConceptsStack.contains(theConcept.getCode())) {
			throw new InvalidRequestException("CodeSystem contains circular reference around code " + theConcept.getCode());
		}

@@ -785,8 +784,8 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc {
		}

		for (TermConceptParentChildLink next : theConcept.getChildren()) {
			next.setCodeSystem(theCodeSystem);
			retVal += validateConceptForStorage(next.getChild(), theCodeSystem, theConceptsStack, theAllConcepts);
			next.setCodeSystem(theCodeSystemVersion);
			retVal += validateConceptForStorage(next.getChild(), theCodeSystemVersion, theConceptsStack, theAllConcepts);
		}

		theConceptsStack.remove(theConceptsStack.size() - 1);

@@ -23,7 +23,9 @@ package ca.uhn.fhir.jpa.term;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;

@@ -44,31 +46,27 @@ import org.quartz.JobExecutionContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.PostConstruct;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.function.Supplier;

public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {

	private static final Logger ourLog = LoggerFactory.getLogger(TermDeferredStorageSvcImpl.class);
	@Autowired
	protected ITermConceptDao myConceptDao;
	@Autowired
	protected ITermCodeSystemDao myCodeSystemDao;
	@Autowired
	protected ITermCodeSystemVersionDao myCodeSystemVersionDao;
	@Autowired
	protected PlatformTransactionManager myTransactionMgr;
	private boolean myProcessDeferred = true;
	final private List<TermCodeSystem> myDeferredCodeSystemsDeletions = Collections.synchronizedList(new ArrayList<>());
	final private List<TermCodeSystemVersion> myDeferredCodeSystemVersionsDeletions = Collections.synchronizedList(new ArrayList<>());
	final private List<TermConcept> myDeferredConcepts = Collections.synchronizedList(new ArrayList<>());
|
@ -76,6 +74,19 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
|
|||
final private List<ConceptMap> myDeferredConceptMaps = Collections.synchronizedList(new ArrayList<>());
|
||||
final private List<TermConceptParentChildLink> myConceptLinksToSaveLater = Collections.synchronizedList(new ArrayList<>());
|
||||
@Autowired
|
||||
protected ITermConceptDao myConceptDao;
|
||||
@Autowired
|
||||
protected ITermCodeSystemDao myCodeSystemDao;
|
||||
@Autowired
|
||||
protected ITermCodeSystemVersionDao myCodeSystemVersionDao;
|
||||
@Autowired
|
||||
protected PlatformTransactionManager myTransactionMgr;
|
||||
@Autowired
|
||||
protected ITermConceptPropertyDao myConceptPropertyDao;
|
||||
@Autowired
|
||||
protected ITermConceptDesignationDao myConceptDesignationDao;
|
||||
private boolean myProcessDeferred = true;
|
||||
@Autowired
|
||||
private ITermConceptParentChildLinkDao myConceptParentChildLinkDao;
|
||||
@Autowired
|
||||
private ISchedulerService mySchedulerService;
|
||||
|
@ -120,7 +131,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
|
|||
@Transactional
|
||||
public void deleteCodeSystemForResource(ResourceTable theCodeSystemToDelete) {
|
||||
List<TermCodeSystemVersion> codeSystemVersionsToDelete = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemToDelete.getResourceId());
|
||||
for (TermCodeSystemVersion codeSystemVersionToDelete : codeSystemVersionsToDelete){
|
||||
for (TermCodeSystemVersion codeSystemVersionToDelete : codeSystemVersionsToDelete) {
|
||||
if (codeSystemVersionToDelete != null) {
|
||||
myDeferredCodeSystemVersionsDeletions.add(codeSystemVersionToDelete);
|
||||
}
|
||||
|
@ -131,13 +142,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
|
|||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void saveAllDeferred() {
|
||||
while (!isStorageQueueEmpty()) {
|
||||
saveDeferred();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void setProcessDeferred(boolean theProcessDeferred) {
|
||||
myProcessDeferred = theProcessDeferred;
|
||||
|
@ -161,7 +165,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
|
|||
ourLog.debug("Saving {} deferred concepts...", count);
|
||||
while (codeCount < count && myDeferredConcepts.size() > 0) {
|
||||
TermConcept next = myDeferredConcepts.remove(0);
|
||||
if(myCodeSystemVersionDao.findById(next.getCodeSystemVersion().getPid()).isPresent()) {
|
||||
if (myCodeSystemVersionDao.findById(next.getCodeSystemVersion().getPid()).isPresent()) {
|
||||
try {
|
||||
codeCount += myCodeSystemStorageSvc.saveConcept(next);
|
||||
} catch (Exception theE) {
|
||||
|
@ -232,6 +236,25 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
|
|||
myDeferredCodeSystemVersionsDeletions.clear();
|
||||
}
|
||||
|
||||
private void runInTransaction(Runnable theRunnable) {
|
||||
assert !TransactionSynchronizationManager.isActualTransactionActive();
|
||||
|
||||
new TransactionTemplate(myTransactionMgr).executeWithoutResult(tx -> theRunnable.run());
|
||||
}
|
||||
|
||||
private <T> T runInTransaction(Supplier<T> theRunnable) {
|
||||
assert !TransactionSynchronizationManager.isActualTransactionActive();
|
||||
|
||||
return new TransactionTemplate(myTransactionMgr).execute(tx -> theRunnable.get());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void saveAllDeferred() {
|
||||
while (!isStorageQueueEmpty()) {
|
||||
saveDeferred();
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional(propagation = Propagation.NEVER)
|
||||
@Override
|
||||
public synchronized void saveDeferred() {
|
||||
|
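The two runInTransaction helpers introduced above wrap Spring's TransactionTemplate so each unit of work gets its own transaction, and the assertion guards against being called while a transaction is already open. A minimal sketch of the same pattern, assuming only spring-tx on the classpath (TxHelperSketch and its constructor argument are illustrative):

import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

import java.util.function.Supplier;

class TxHelperSketch {
	private final PlatformTransactionManager myTxManager;

	TxHelperSketch(PlatformTransactionManager theTxManager) {
		myTxManager = theTxManager;
	}

	void runInTransaction(Runnable theRunnable) {
		// Each call opens, then commits or rolls back, its own transaction
		new TransactionTemplate(myTxManager).executeWithoutResult(tx -> theRunnable.run());
	}

	<T> T runInTransaction(Supplier<T> theSupplier) {
		return new TransactionTemplate(myTxManager).execute(tx -> theSupplier.get());
	}
}
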
@@ -249,10 +272,8 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
return;
}

TransactionTemplate tt = new TransactionTemplate(myTransactionMgr);
tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
if (isDeferredConceptsOrConceptLinksToSaveLater()) {
tt.execute(t -> {
runInTransaction(() -> {
processDeferredConcepts();
return null;
});
@@ -261,7 +282,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
}

if (isDeferredValueSets()) {
tt.execute(t -> {
runInTransaction(() -> {
processDeferredValueSets();
return null;
});
@@ -270,7 +291,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
}

if (isDeferredConceptMaps()) {
tt.execute(t -> {
runInTransaction(() -> {
processDeferredConceptMaps();
return null;
});
@@ -278,25 +299,116 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
continue;
}

if (isDeferredCodeSystemVersionDeletions()) {
processDeferredCodeSystemVersionDeletions();
}

if (isDeferredCodeSystemDeletions()) {
processDeferredCodeSystemDeletions();
}
}
}

private boolean isDeferredCodeSystemVersionDeletions() {
return !myDeferredCodeSystemVersionsDeletions.isEmpty();
}

private void processDeferredCodeSystemDeletions() {

for (TermCodeSystemVersion next : myDeferredCodeSystemVersionsDeletions) {
myCodeSystemStorageSvc.deleteCodeSystemVersion(next);
}

myDeferredCodeSystemVersionsDeletions.clear();
for (TermCodeSystem next : myDeferredCodeSystemsDeletions) {
myCodeSystemStorageSvc.deleteCodeSystem(next);
}
myDeferredCodeSystemsDeletions.clear();
}

private void processDeferredCodeSystemVersionDeletions() {
for (TermCodeSystemVersion next : myDeferredCodeSystemVersionsDeletions) {
processDeferredCodeSystemVersionDeletions(next.getPid());
}

myDeferredCodeSystemVersionsDeletions.clear();
}

private void processDeferredCodeSystemVersionDeletions(long theCodeSystemVersionPid) {
assert !TransactionSynchronizationManager.isActualTransactionActive();
ourLog.info(" * Deleting CodeSystemVersion[id={}]", theCodeSystemVersionPid);

PageRequest page1000 = PageRequest.of(0, 1000);

// Parent/Child links
{
String descriptor = "parent/child links";
Supplier<Slice<Long>> loader = () -> myConceptParentChildLinkDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptParentChildLinkDao);
}

// Properties
{
String descriptor = "concept properties";
Supplier<Slice<Long>> loader = () -> myConceptPropertyDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptPropertyDao);
}

// Designations
{
String descriptor = "concept designations";
Supplier<Slice<Long>> loader = () -> myConceptDesignationDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptDesignationDao);
}

// Concepts
{
String descriptor = "concepts";
// For some reason, concepts are much slower to delete, so use a smaller batch size
PageRequest page100 = PageRequest.of(0, 100);
Supplier<Slice<Long>> loader = () -> myConceptDao.findIdsByCodeSystemVersion(page100, theCodeSystemVersionPid);
Supplier<Integer> counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid);
doDelete(descriptor, loader, counter, myConceptDao);
}

runInTransaction(() -> {
Optional<TermCodeSystem> codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid);
if (codeSystemOpt.isPresent()) {
TermCodeSystem codeSystem = codeSystemOpt.get();
ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid());
codeSystem.setCurrentVersion(null);
myCodeSystemDao.save(codeSystem);
}

ourLog.info(" * Deleting code system version");
Optional<TermCodeSystemVersion> csv = myCodeSystemVersionDao.findById(theCodeSystemVersionPid);
if (csv.isPresent()) {
myCodeSystemVersionDao.delete(csv.get());
}
});

}

private <T> void doDelete(String theDescriptor, Supplier<Slice<Long>> theLoader, Supplier<Integer> theCounter, JpaRepository<T, Long> theDao) {
assert !TransactionSynchronizationManager.isActualTransactionActive();

int count;
ourLog.info(" * Deleting {}", theDescriptor);
int totalCount = runInTransaction(theCounter);
StopWatch sw = new StopWatch();
count = 0;
while (true) {
Slice<Long> link = runInTransaction(theLoader);
if (!link.hasContent()) {
break;
}

runInTransaction(() -> link.forEach(theDao::deleteById));

count += link.getNumberOfElements();
ourLog.info(" * {} {} deleted ({}/{}) remaining - {}/sec - ETA: {}", count, theDescriptor, count, totalCount, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount));
}
}

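doDelete(...) above deletes in fixed-size batches: it loads page 0 of the matching IDs, deletes that page in a fresh transaction, and repeats until a page comes back empty. Page 0 always holds the next batch precisely because the previous batch was just deleted. A self-contained sketch of that loop with the repository and transaction plumbing replaced by a plain list:

import java.util.ArrayList;
import java.util.List;

class PagedDeleteSketch {
	public static void main(String[] args) {
		List<Long> table = new ArrayList<>();
		for (long i = 0; i < 2_500; i++) {
			table.add(i);
		}

		int pageSize = 1000; // page1000 above; concepts use 100
		int deleted = 0;
		while (true) {
			// "Load page 0": deleted rows vanish, so page 0 is always the next batch
			List<Long> page = new ArrayList<>(table.subList(0, Math.min(pageSize, table.size())));
			if (page.isEmpty()) {
				break;
			}
			table.removeAll(page); // stands in for theDao.deleteById(...) per element
			deleted += page.size();
			System.out.println(deleted + " deleted so far");
		}
		// Prints 1000, 2000, 2500
	}
}
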
@Override
public boolean isStorageQueueEmpty() {
boolean retVal = !isProcessDeferredPaused();
@@ -354,16 +466,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
mySchedulerService.scheduleLocalJob(5000, jobDefinition);
}

public static class Job implements HapiJob {
@Autowired
private ITermDeferredStorageSvc myTerminologySvc;

@Override
public void execute(JobExecutionContext theContext) {
myTerminologySvc.saveDeferred();
}
}

@VisibleForTesting
void setTransactionManagerForUnitTest(PlatformTransactionManager theTxManager) {
myTransactionMgr = theTxManager;
@@ -395,5 +497,20 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc {
ourLog.info("isDeferredCodeSystemDeletions: {}", isDeferredCodeSystemDeletions());
}

@Override
public synchronized void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) {
myDeferredCodeSystemVersionsDeletions.add(theCodeSystemVersion);
}

public static class Job implements HapiJob {
@Autowired
private ITermDeferredStorageSvc myTerminologySvc;

@Override
public void execute(JobExecutionContext theContext) {
myTerminologySvc.saveDeferred();
}
}


}

@@ -66,7 +66,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V
myContext = theContext;
}

@Nonnull
@Nonnull
@Override
public Integer getCapacityRemaining() {
return (myMaxCapacity - myAddedConcepts) + mySkipCountRemaining;

@@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.term.api;
*/

import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -57,10 +58,13 @@ public interface ITermDeferredStorageSvc {

void deleteCodeSystemForResource(ResourceTable theCodeSystemResourceToDelete);

void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion);

/**
* This is mostly here for unit tests - Saves any and all deferred concepts and links
*/
void saveAllDeferred();

void logQueueForUnitTest();

}

@@ -157,7 +157,7 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
}

/**
* Returns all INSERT queries executed on the current thread - Index 0 is oldest
* Returns all queries executed on the current thread - Index 0 is oldest
*/
public List<SqlQuery> getAllQueriesForCurrentThread() {
return getQueriesForCurrentThreadStartingWith("");
@@ -252,14 +252,25 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe
}

/**
* Log all captured INSERT queries
* Log all captured queries
*/
public void logAllQueriesForCurrentThread() {
List<String> queries = getAllQueriesForCurrentThread()
.stream()
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
.collect(Collectors.toList());
ourLog.info("Insert Queries:\n{}", String.join("\n", queries));
ourLog.info("Queries:\n{}", String.join("\n", queries));
}

/**
* Log all captured queries
*/
public void logAllQueries() {
List<String> queries = getCapturedQueries()
.stream()
.map(CircularQueueCaptureQueriesListener::formatQueryAsSql)
.collect(Collectors.toList());
ourLog.info("Queries:\n{}", String.join("\n", queries));
}

/**

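The renames above make the javadoc and log prefixes match what the methods actually do: logAllQueriesForCurrentThread() logs every captured statement, not only INSERTs. All the loggers share one shape - map each query through formatQueryAsSql, join with newlines, emit a single log line - sketched here standalone (the SQL strings are made up):

import java.util.List;
import java.util.stream.Collectors;

class QueryLogSketch {
	public static void main(String[] args) {
		List<String> captured = List.of(
			"select * from HFJ_RESOURCE",
			"insert into HFJ_RES_VER (RES_ID) values (1)");

		// Same shape as logAllQueries() above: format, join, log once
		String joined = captured.stream()
			.map(q -> q + ";") // stands in for formatQueryAsSql(...)
			.collect(Collectors.joining("\n"));
		System.out.println("Queries:\n" + joined);
	}
}
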
@@ -21,17 +21,24 @@ package ca.uhn.fhir.jpa.util;
*/


import org.hibernate.search.spatial.impl.Point;
import org.hibernate.search.engine.spatial.GeoPoint;
import org.hibernate.search.engine.spatial.GeoBoundingBox;
import org.slf4j.Logger;

import static ca.uhn.fhir.jpa.searchparam.extractor.GeopointNormalizer.normalizeLatitude;
import static ca.uhn.fhir.jpa.searchparam.extractor.GeopointNormalizer.normalizeLongitude;
import static org.slf4j.LoggerFactory.getLogger;

public class CoordCalculator {
private static final Logger ourLog = getLogger(CoordCalculator.class);
public static final double MAX_SUPPORTED_DISTANCE_KM = 10000.0; // Slightly less than a quarter of the earth's circumference
private static final double RADIUS_EARTH_KM = 6378.1;

// Source: https://stackoverflow.com/questions/7222382/get-lat-long-given-current-point-distance-and-bearing
static Point findTarget(double theLatitudeDegrees, double theLongitudeDegrees, double theBearingDegrees, double theDistanceKm) {
static GeoPoint findTarget(double theLatitudeDegrees, double theLongitudeDegrees, double theBearingDegrees, double theDistanceKm) {

double latitudeRadians = Math.toRadians(Point.normalizeLatitude(theLatitudeDegrees));
double longitudeRadians = Math.toRadians(Point.normalizeLongitude(theLongitudeDegrees));
double latitudeRadians = Math.toRadians(normalizeLatitude(theLatitudeDegrees));
double longitudeRadians = Math.toRadians(normalizeLongitude(theLongitudeDegrees));
double bearingRadians = Math.toRadians(theBearingDegrees);
double distanceRadians = theDistanceKm / RADIUS_EARTH_KM;

@@ -41,18 +48,23 @@ public class CoordCalculator {
double targetLongitude = longitudeRadians + Math.atan2(Math.sin(bearingRadians) * Math.sin(distanceRadians) * Math.cos(latitudeRadians),
Math.cos(distanceRadians) - Math.sin(latitudeRadians) * Math.sin(targetLatitude));

return Point.fromDegrees(Math.toDegrees(targetLatitude), Math.toDegrees(targetLongitude));
double latitude = Math.toDegrees(targetLatitude);
double longitude = Math.toDegrees(targetLongitude);

GeoPoint of = GeoPoint.of(normalizeLatitude(latitude), normalizeLongitude(longitude));
return of;
}

/**
* Find a box around my coordinates such that the closest distance to each edge is the provided distance
* @return
*/
public static SearchBox getBox(double theLatitudeDegrees, double theLongitudeDegrees, Double theDistanceKm) {
public static GeoBoundingBox getBox(double theLatitudeDegrees, double theLongitudeDegrees, Double theDistanceKm) {
double diagonalDistanceKm = theDistanceKm * Math.sqrt(2.0);

Point northEast = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 45.0, diagonalDistanceKm);
Point southWest = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 225.0, diagonalDistanceKm);
GeoPoint topLeft = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 315.0, diagonalDistanceKm);
GeoPoint bottomRight = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 135.0, diagonalDistanceKm);

return new SearchBox(southWest, northEast);
return GeoBoundingBox.of(topLeft, bottomRight);
}
}

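getBox(...) now returns Hibernate Search 6's GeoBoundingBox, so the corner bearings flip from 45°/225° (NE/SW) to 315°/135° (top-left/bottom-right). Each corner sits at distance d·√2 along the diagonal, which leaves every edge of the box d km from the center. A self-contained sketch of the underlying destination-point formula (standard great-circle math with the same earth radius; the coordinates are illustrative and normalization is omitted):

class DestinationPointSketch {
	private static final double RADIUS_EARTH_KM = 6378.1;

	// Returns {latitudeDegrees, longitudeDegrees} of the point reached by
	// travelling distanceKm from (latDeg, lonDeg) along bearingDeg
	static double[] findTarget(double latDeg, double lonDeg, double bearingDeg, double distanceKm) {
		double lat = Math.toRadians(latDeg);
		double lon = Math.toRadians(lonDeg);
		double bearing = Math.toRadians(bearingDeg);
		double angular = distanceKm / RADIUS_EARTH_KM;

		double targetLat = Math.asin(Math.sin(lat) * Math.cos(angular)
			+ Math.cos(lat) * Math.sin(angular) * Math.cos(bearing));
		double targetLon = lon + Math.atan2(
			Math.sin(bearing) * Math.sin(angular) * Math.cos(lat),
			Math.cos(angular) - Math.sin(lat) * Math.sin(targetLat));
		return new double[]{Math.toDegrees(targetLat), Math.toDegrees(targetLon)};
	}

	public static void main(String[] args) {
		// Corners as in getBox(...): top-left at bearing 315, bottom-right at 135,
		// each at distance d * sqrt(2) so every edge ends up d km from the center
		double d = 10.0;
		double diagonal = d * Math.sqrt(2.0);
		double[] topLeft = findTarget(45.0, -75.0, 315.0, diagonal);
		double[] bottomRight = findTarget(45.0, -75.0, 135.0, diagonal);
		System.out.println("NW corner: " + topLeft[0] + "," + topLeft[1]);
		System.out.println("SE corner: " + bottomRight[0] + "," + bottomRight[1]);
	}
}
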
@@ -1,45 +0,0 @@
package ca.uhn.fhir.jpa.util;

/*-
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2021 Smile CDR, Inc.
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import org.hibernate.search.spatial.impl.Point;

public class SearchBox {
private final Point mySouthWest;
private final Point myNorthEast;

public SearchBox(Point theSouthWest, Point theNorthEast) {
mySouthWest = theSouthWest;
myNorthEast = theNorthEast;
}

public Point getSouthWest() {
return mySouthWest;
}

public Point getNorthEast() {
return myNorthEast;
}

public boolean crossesAntiMeridian() {
return myNorthEast.getLongitude() < mySouthWest.getLongitude();
}
}

@@ -33,7 +33,21 @@ import org.hibernate.validator.constraints.Length;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.InstantType;

import javax.persistence.*;
import javax.persistence.Column;
import javax.persistence.Embedded;
import javax.persistence.EmbeddedId;
import javax.persistence.Entity;
import javax.persistence.ForeignKey;
import javax.persistence.GeneratedValue;
import javax.persistence.Index;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.SequenceGenerator;
import javax.persistence.Table;
import javax.persistence.Transient;
import javax.persistence.UniqueConstraint;
import javax.validation.constraints.Size;
import java.io.IOException;
import java.io.InputStream;
@@ -43,7 +57,9 @@ import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

@@ -99,6 +115,41 @@ public class TestUtil {
}

private static void scanClass(Set<String> theNames, Class<?> theClazz, boolean theIsSuperClass) {
Map<String, Integer> columnNameToLength = new HashMap<>();

scanClassOrSuperclass(theNames, theClazz, theIsSuperClass, columnNameToLength);

Table table = theClazz.getAnnotation(Table.class);
if (table != null) {

// This is the length for MySQL per https://dev.mysql.com/doc/refman/8.0/en/innodb-limits.html
// No idea why 3072.. what a weird limit but I'm sure they have their reason.
int maxIndexLength = 3072;

for (UniqueConstraint nextIndex : table.uniqueConstraints()) {
int indexLength = calculateIndexLength(nextIndex.columnNames(), columnNameToLength, theClazz, nextIndex.name());
if (indexLength > maxIndexLength) {
throw new IllegalStateException("Index '" + nextIndex.name() + "' is too long. Length is " + indexLength + " and must not exceed " + maxIndexLength + " which is the maximum MySQL length");
}
}

}

}

private static int calculateIndexLength(String[] theColumnNames, Map<String, Integer> theColumnNameToLength, Class<?> theClazz, String theIndexName) {
int retVal = 0;
for (String nextName : theColumnNames) {
Integer nextLength = theColumnNameToLength.get(nextName);
if (nextLength == null) {
throw new IllegalStateException("Index '" + theIndexName + "' references unknown column: " + nextName);
}
retVal += nextLength;
}
return retVal;
}

private static void scanClassOrSuperclass(Set<String> theNames, Class<?> theClazz, boolean theIsSuperClass, Map<String, Integer> columnNameToLength) {
ourLog.info("Scanning: {}", theClazz.getSimpleName());

Subselect subselect = theClazz.getAnnotation(Subselect.class);
@@ -131,15 +182,37 @@ public class TestUtil {
OneToOne oneToOne = nextField.getAnnotation(OneToOne.class);
boolean isOtherSideOfOneToManyMapping = oneToMany != null && isNotBlank(oneToMany.mappedBy());
boolean isOtherSideOfOneToOneMapping = oneToOne != null && isNotBlank(oneToOne.mappedBy());
boolean isField = nextField.getAnnotation(org.hibernate.search.annotations.Field.class) != null;
boolean isField = nextField.getAnnotation(org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField.class) != null;
isField |= nextField.getAnnotation(org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField.class) != null;
isField |= nextField.getAnnotation(org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField.class) != null;
Validate.isTrue(
hasEmbedded ||
hasColumn ||
hasJoinColumn ||
isOtherSideOfOneToManyMapping ||
isOtherSideOfOneToOneMapping ||
hasEmbeddedId ||
isField, "Non-transient has no @Column or @JoinColumn or @EmbeddedId: " + nextField);

int columnLength = 16;
String columnName = null;
if (hasColumn) {
columnName = nextField.getAnnotation(Column.class).name();
columnLength = nextField.getAnnotation(Column.class).length();
}
if (hasJoinColumn) {
columnName = nextField.getAnnotation(JoinColumn.class).name();
}

if (columnName != null) {
if (nextField.getType().isAssignableFrom(String.class)) {
// MySQL treats each char as the max possible byte count in UTF-8 for its calculations
columnLength = columnLength * 4;
}

columnNameToLength.put(columnName, columnLength);
}

}


@@ -149,7 +222,7 @@ public class TestUtil {
return;
}

scanClass(theNames, theClazz.getSuperclass(), true);
scanClassOrSuperclass(theNames, theClazz.getSuperclass(), true, columnNameToLength);
}

private static void scan(AnnotatedElement theAnnotatedElement, Set<String> theNames, boolean theIsSuperClass, boolean theIsView) {

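The new check sums the declared @Column lengths of each unique constraint, counting every character of a String column as 4 bytes (MySQL's worst-case utf8mb4 accounting), and fails if the total exceeds InnoDB's 3072-byte index limit. The arithmetic as a tiny sketch (column names and lengths are made up):

import java.util.LinkedHashMap;
import java.util.Map;

class IndexLengthSketch {
	public static void main(String[] args) {
		// Declared @Column(length = ...) values, pre-multiplied by 4 for String columns
		Map<String, Integer> columnNameToLength = new LinkedHashMap<>();
		columnNameToLength.put("SYSTEM_URL", 500 * 4); // VARCHAR(500) -> 2000 bytes
		columnNameToLength.put("CODE_VAL", 300 * 4);   // VARCHAR(300) -> 1200 bytes

		int maxIndexLength = 3072; // InnoDB's limit, as in scanClass(...) above
		int indexLength = columnNameToLength.values().stream().mapToInt(Integer::intValue).sum();
		System.out.println(indexLength + " bytes"); // 3200: this index would be rejected
		if (indexLength > maxIndexLength) {
			System.out.println("Index too long by " + (indexLength - maxIndexLength) + " bytes");
		}
	}
}
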
@@ -1,26 +1,24 @@
{
"mappings" : {
"ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity" : {
"properties" : {
"codeable_concept_id" : {
"type" : "keyword"
},
"codingcode" : {
"type" : "keyword"
},
"codingcode_system_hash" : {
"type" : "keyword"
},
"codingdisplay" : {
"type" : "keyword"
},
"codingsystem" : {
"type" : "keyword"
},
"text" : {
"type" : "keyword"
}
}
}
}
"mappings": {
"properties": {
"codeable_concept_id": {
"type": "keyword"
},
"codingcode": {
"type": "keyword"
},
"codingcode_system_hash": {
"type": "keyword"
},
"codingdisplay": {
"type": "keyword"
},
"codingsystem": {
"type": "keyword"
},
"text": {
"type": "keyword"
}
}
}
}

@@ -1,6 +1,5 @@
{
"mappings" : {
"ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity" : {
"properties" : {
"codeconceptid" : {
"type" : "keyword"
@@ -47,4 +46,3 @@
}
}
}
}

@@ -1,16 +1,19 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
import ca.uhn.fhir.validation.IInstanceValidatorModule;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder;
import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
@@ -23,9 +26,11 @@ import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import static ca.uhn.fhir.jpa.dao.BaseJpaTest.buildHeapLuceneHibernateSearchProperties;
import static org.junit.jupiter.api.Assertions.fail;

@Configuration
@@ -148,12 +153,13 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 {
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "local-heap");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");

extraProperties.putAll(buildHeapLuceneHibernateSearchProperties());

return extraProperties;
}


/**
* Bean which validates incoming requests
*/

@@ -1,6 +1,6 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender;
import ca.uhn.fhir.jpa.subscription.match.deliver.email.JavaMailEmailSender;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
@@ -10,6 +10,10 @@ import ca.uhn.fhir.validation.ResultSeverityEnum;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@@ -151,10 +155,13 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 {
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "local-heap");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");
extraProperties.put("hibernate.search.autoregister_listeners", "true");

extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap");
extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
extraProperties.put(HibernateOrmMapperSettings.ENABLED, "true");

return extraProperties;
}

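The property swap above is the Hibernate Search 5 → 6 rename: the four removed hibernate.search.* keys have no effect in 6, and the added BackendSettings/HibernateOrmMapperSettings constants build the new backend-scoped keys. A sketch with the literal strings those constants should resolve to (the literals are my reading of the constants, not taken from this diff):

import java.util.Properties;

class Hs6PropsSketch {
	public static void main(String[] args) {
		Properties p = new Properties();
		// Hibernate Search 5 keys (removed above):
		//   hibernate.search.model_mapping
		//   hibernate.search.default.directory_provider
		//   hibernate.search.lucene_version
		//   hibernate.search.autoregister_listeners
		// Hibernate Search 6 equivalents (the constants above resolve to roughly these):
		p.put("hibernate.search.backend.type", "lucene");
		p.put("hibernate.search.backend.analysis.configurer",
			"ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer");
		p.put("hibernate.search.backend.directory.type", "local-heap");
		p.put("hibernate.search.backend.lucene_version", "LUCENE_CURRENT");
		p.put("hibernate.search.enabled", "true");
		p.forEach((k, v) -> System.out.println(k + "=" + v));
	}
}
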
@@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl;
import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
@@ -15,6 +16,10 @@ import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
@@ -27,6 +32,7 @@ import java.sql.Connection;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import static ca.uhn.fhir.jpa.dao.BaseJpaTest.buildHeapLuceneHibernateSearchProperties;
import static org.junit.jupiter.api.Assertions.fail;

@Configuration
@@ -160,11 +166,8 @@ public class TestR4Config extends BaseJavaConfigR4 {
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.search.model_mapping", ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "local-heap");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");
extraProperties.put("hibernate.search.autoregister_listeners", "true");
extraProperties.put("hibernate.temp.use_jdbc_metadata_defaults","false");

extraProperties.putAll(buildHeapLuceneHibernateSearchProperties());

return extraProperties;
}

@@ -1,77 +1,60 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
import org.hibernate.search.elasticsearch.cfg.ElasticsearchIndexStatus;
import org.hibernate.search.elasticsearch.cfg.IndexSchemaManagementStrategy;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper;
import org.hibernate.search.backend.elasticsearch.index.IndexStatus;
import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic;
import pl.allegro.tech.embeddedelasticsearch.PopularProperties;
import org.testcontainers.elasticsearch.ElasticsearchContainer;

import javax.annotation.PreDestroy;
import java.io.IOException;
import java.util.Properties;
import java.util.UUID;
import java.util.concurrent.TimeUnit;


@Configuration
public class TestR4ConfigWithElasticSearch extends TestR4Config {

private static final Logger ourLog = LoggerFactory.getLogger(TestR4ConfigWithElasticSearch.class);
private static final String ELASTIC_VERSION = "6.5.4";
protected final String elasticsearchHost = "localhost";
protected final String elasticsearchUserId = "";
protected final String elasticsearchPassword = "";


@Override
@Bean
public Properties jpaProperties() {
Properties retVal = super.jpaProperties();

//Override default lucene settings
// Force elasticsearch to start first
int httpPort = embeddedElasticSearch().getHttpPort();
ourLog.info("ElasticSearch started on port: {}", httpPort);
int httpPort = elasticContainer().getMappedPort(9200);//9200 is the HTTP port
String host = elasticContainer().getHost();

new ElasticsearchHibernatePropertiesBuilder()
.setDebugRefreshAfterWrite(true)
.setDebugIndexSyncStrategy("read-sync")
.setDebugPrettyPrintJsonLog(true)
.setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy.CREATE)
.setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE)
.setIndexManagementWaitTimeoutMillis(10000)
.setRequiredIndexStatus(ElasticsearchIndexStatus.YELLOW)
.setRestUrl("http://"+ elasticsearchHost + ":" + httpPort)
.setUsername(elasticsearchUserId)
.setPassword(elasticsearchPassword)
.setRequiredIndexStatus(IndexStatus.YELLOW)
.setRestUrl(host+ ":" + httpPort)
.setProtocol("http")
.setUsername("")
.setPassword("")
.apply(retVal);

return retVal;
}

@Bean
public EmbeddedElastic embeddedElasticSearch() {
EmbeddedElastic embeddedElastic = null;
try {
embeddedElastic = EmbeddedElastic.builder()
.withElasticVersion(ELASTIC_VERSION)
.withSetting(PopularProperties.TRANSPORT_TCP_PORT, 0)
.withSetting(PopularProperties.HTTP_PORT, 0)
.withSetting(PopularProperties.CLUSTER_NAME, UUID.randomUUID())
.withStartTimeout(60, TimeUnit.SECONDS)
.build()
.start();
} catch (IOException | InterruptedException e) {
throw new ConfigurationException(e);
}

return embeddedElastic;
public ElasticsearchContainer elasticContainer() {
ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch();
embeddedElasticSearch.start();
return embeddedElasticSearch;
}


@PreDestroy
public void stop() {
embeddedElasticSearch().stop();
elasticContainer().stop();
}

}

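Replacing EmbeddedElastic with Testcontainers means Elasticsearch now runs in Docker, bound to a random free host port that getMappedPort(9200) reveals after startup. A minimal standalone sketch, assuming a recent Testcontainers release (the image tag is illustrative; TestElasticsearchContainerHelper above presumably pins the real one):

import org.testcontainers.elasticsearch.ElasticsearchContainer;

class ElasticContainerSketch {
	public static void main(String[] args) {
		try (ElasticsearchContainer elastic =
				new ElasticsearchContainer("docker.elastic.co/elasticsearch/elasticsearch:7.10.1")) {
			elastic.start();
			// Port 9200 inside the container maps to a random free host port
			String url = "http://" + elastic.getHost() + ":" + elastic.getMappedPort(9200);
			System.out.println("Elasticsearch at " + url);
		} // close() stops the container when the try block exits
	}
}
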
@@ -1,5 +1,6 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -10,10 +11,17 @@ import java.io.IOException;
@Configuration
public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElasticSearch {


@Bean
public PartitionSettings partitionSettings() {
return new PartitionSettings();
}

@Bean()
public ElasticsearchSvcImpl myElasticsearchSvc() {
int elasticsearchPort = embeddedElasticSearch().getHttpPort();
return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
int elasticsearchPort = elasticContainer().getMappedPort(9200);
String host = elasticContainer().getHost();
return new ElasticsearchSvcImpl(host, elasticsearchPort, "", "");
}

@PreDestroy

@@ -3,14 +3,14 @@ package ca.uhn.fhir.jpa.config;
import java.util.Properties;

import org.hibernate.dialect.H2Dialect;
import org.hibernate.jpa.HibernatePersistenceProvider;
import org.springframework.beans.factory.annotation.Autowire;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl;
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;

@Configuration
@@ -41,7 +41,7 @@ public class TestR4WithLuceneDisabledConfig extends TestR4Config {
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.search.autoregister_listeners", "false");
extraProperties.put(HibernateOrmMapperSettings.ENABLED, "false");
return extraProperties;
}

@@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc;
import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener;
import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor;
@@ -10,6 +11,10 @@ import net.ttddyy.dsproxy.listener.SingleQueryCountHolder;
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder;
import org.apache.commons.dbcp2.BasicDataSource;
import org.hibernate.dialect.H2Dialect;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -23,6 +28,7 @@ import javax.sql.DataSource;
import java.sql.Connection;
import java.util.Properties;

import static ca.uhn.fhir.jpa.dao.BaseJpaTest.buildHeapLuceneHibernateSearchProperties;
import static org.junit.jupiter.api.Assertions.fail;

@Configuration
@@ -146,10 +152,8 @@ public class TestR5Config extends BaseJavaConfigR5 {
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.search.model_mapping", ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "local-heap");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");
extraProperties.put("hibernate.search.autoregister_listeners", "true");

extraProperties.putAll(buildHeapLuceneHibernateSearchProperties());

return extraProperties;
}

@@ -13,10 +13,14 @@ import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc;
import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc;
import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test;
import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider;
import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;
import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider;
import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc;
import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc;
@@ -24,6 +28,7 @@ import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.MemoryCacheService;
import ca.uhn.fhir.model.dstu2.resource.Bundle;
@@ -45,6 +50,11 @@ import org.apache.commons.io.IOUtils;
import org.hibernate.HibernateException;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.jdbc.Work;
import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;
import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent;
import org.hl7.fhir.dstu3.model.Resource;
@@ -61,6 +71,7 @@ import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.domain.Pageable;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.test.context.TestPropertySource;
import org.springframework.transaction.PlatformTransactionManager;
@@ -76,14 +87,18 @@ import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static ca.uhn.fhir.util.TestUtil.randomizeLocale;
import static org.awaitility.Awaitility.await;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.eq;
@@ -111,6 +126,16 @@ public abstract class BaseJpaTest extends BaseTest {
TestUtil.setShouldRandomizeTimezones(false);
}

public static Map<?,?> buildHeapLuceneHibernateSearchProperties() {
Map<String, String> props = new HashMap<>();
props.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
props.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
props.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap");
props.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
props.put(HibernateOrmMapperSettings.ENABLED, "true");
return props;
}

@RegisterExtension
public LoggingExtension myLoggingExtension = new LoggingExtension();
@Mock(answer = Answers.RETURNS_DEEP_STUBS)
@@ -580,4 +605,63 @@ public abstract class BaseJpaTest extends BaseTest {
Thread.sleep(500);
}

protected TermValueSetConceptDesignation assertTermConceptContainsDesignation(TermValueSetConcept theConcept, String theLanguage, String theUseSystem, String theUseCode, String theUseDisplay, String theDesignationValue) {
Stream<TermValueSetConceptDesignation> stream = theConcept.getDesignations().stream();
if (theLanguage != null) {
stream = stream.filter(designation -> theLanguage.equalsIgnoreCase(designation.getLanguage()));
}
if (theUseSystem != null) {
stream = stream.filter(designation -> theUseSystem.equalsIgnoreCase(designation.getUseSystem()));
}
if (theUseCode != null) {
stream = stream.filter(designation -> theUseCode.equalsIgnoreCase(designation.getUseCode()));
}
if (theUseDisplay != null) {
stream = stream.filter(designation -> theUseDisplay.equalsIgnoreCase(designation.getUseDisplay()));
}
if (theDesignationValue != null) {
stream = stream.filter(designation -> theDesignationValue.equalsIgnoreCase(designation.getValue()));
}

Optional<TermValueSetConceptDesignation> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Concept %s did not contain designation [%s|%s|%s|%s|%s] ", theConcept.toString(), theLanguage, theUseSystem, theUseCode, theUseDisplay, theDesignationValue);
fail(failureMessage);
return null;
} else {
return first.get();
}

}

protected TermValueSetConcept assertTermValueSetContainsConceptAndIsInDeclaredOrder(TermValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) {
List<TermValueSetConcept> contains = theValueSet.getConcepts();

Stream<TermValueSetConcept> stream = contains.stream();
if (theSystem != null) {
stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem()));
}
if (theCode != null) {
stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode()));
}
if (theDisplay != null) {
stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay()));
}
if (theDesignationCount != null) {
stream = stream.filter(concept -> concept.getDesignations().size() == theDesignationCount);
}

Optional<TermValueSetConcept> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount);
fail(failureMessage);
return null;
} else {
TermValueSetConcept termValueSetConcept = first.get();
assertEquals(termValueSetConcept.getOrder(), theValueSet.getConcepts().indexOf(termValueSetConcept));
return termValueSetConcept;
}
}


}

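Both assert helpers above narrow a Stream by conditionally appending one filter per non-null criterion, then fail with a formatted message if nothing survives. The chaining trick in isolation, with a throwaway data shape:

import java.util.List;
import java.util.Optional;
import java.util.stream.Stream;

class FilterChainSketch {
	public static void main(String[] args) {
		String wantedSystem = "http://loinc.org";
		String wantedCode = null; // null criteria are simply skipped

		Stream<String[]> stream = List.of(
			new String[]{"http://loinc.org", "8480-6"},
			new String[]{"http://snomed.info/sct", "271649006"}).stream();

		// Each non-null criterion reassigns the stream with one more filter
		if (wantedSystem != null) {
			stream = stream.filter(c -> wantedSystem.equalsIgnoreCase(c[0]));
		}
		if (wantedCode != null) {
			stream = stream.filter(c -> wantedCode.equalsIgnoreCase(c[1]));
		}

		Optional<String[]> first = stream.findFirst();
		System.out.println(first.map(c -> c[0] + "|" + c[1]).orElse("no match"));
	}
}
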
@@ -62,8 +62,8 @@ import ca.uhn.fhir.rest.api.EncodingEnum;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import ca.uhn.fhir.util.TestUtil;
import org.apache.commons.io.IOUtils;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.AfterEach;
@@ -226,10 +226,10 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest {
@BeforeEach
public void beforeFlushFT() {
runInTransaction(() -> {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);
ftem.flushToIndexes();
SearchSession searchSession = Search.session(myEntityManager);
searchSession.workspace(ResourceTable.class).purge();
// searchSession.workspace(ResourceIndexedSearchParamString.class).purge();
searchSession.indexingPlan().execute();
});

myDaoConfig.setSchedulingDisabled(true);

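The purge logic moves from Hibernate Search 5's FullTextEntityManager to 6's SearchSession: workspace(...).purge() drops the index documents for a type and indexingPlan().execute() flushes pending indexing work within the current transaction. The shape of the new API in isolation (the EntityManager and indexed type are assumed to come from a surrounding fixture, as in beforeFlushFT() above):

import javax.persistence.EntityManager;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;

class PurgeSketch {
	// Must run inside an active transaction
	static void purgeIndex(EntityManager em, Class<?> indexedType) {
		SearchSession session = Search.session(em);
		session.workspace(indexedType).purge(); // delete all documents of that type
		session.indexingPlan().execute();       // flush pending indexing work now
	}
}
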
@ -16,7 +16,6 @@ import org.junit.jupiter.api.Test;
|
|||
import org.springframework.transaction.TransactionStatus;
|
||||
import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.model.dstu2.resource.*;
|
||||
import ca.uhn.fhir.model.primitive.Base64BinaryDt;
|
||||
|
@ -33,114 +32,6 @@ public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test {
|
|||
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSuggestIgnoresBase64Content() {
|
||||
Patient patient = new Patient();
|
||||
patient.addName().addFamily("testSuggest");
|
||||
IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();
|
||||
|
||||
Media med = new Media();
|
||||
med.getSubject().setReference(ptId);
|
||||
med.getSubtype().setText("Systolic Blood Pressure");
|
||||
med.getContent().setContentType("LCws");
|
||||
med.getContent().setData(new Base64BinaryDt(new byte[] { 44, 44, 44, 44, 44, 44, 44, 44 }));
med.getContent().setTitle("bbbb syst");
myMediaDao.create(med, mySrd);
ourLog.info(myFhirCtx.newJsonParser().encodeResourceToString(med));

List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "press", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("Pressure", output.get(0).getTerm());
assertEquals("Systolic Blood Pressure", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "prezure", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("Pressure", output.get(0).getTerm());
assertEquals("Systolic Blood Pressure", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "syst", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("syst", output.get(0).getTerm());
assertEquals("bbbb syst", output.get(1).getTerm());
assertEquals("Systolic", output.get(2).getTerm());
assertEquals("Systolic Blood Pressure", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "LCws", null);
ourLog.info("Found: " + output);
assertEquals(0, output.size());
}

@Test
public void testSuggest() {
Patient patient = new Patient();
patient.addName().addFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();

Observation obs = new Observation();
obs.getSubject().setReference(ptId);
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
myObservationDao.create(obs, mySrd);

obs = new Observation();
obs.getSubject().setReference(ptId);
obs.getCode().setText("MNBVCXZ");
myObservationDao.create(obs, mySrd);

obs = new Observation();
obs.getSubject().setReference(ptId);
obs.getCode().setText("ZXC HELLO");
obs.addComponent().getCode().setText("HHHHHHHHHH");
myObservationDao.create(obs, mySrd);

/*
 * These shouldn't match since they're for another patient
 */
patient = new Patient();
patient.addName().addFamily("testSuggest2");
IIdType ptId2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();

Observation obs2 = new Observation();
obs2.getSubject().setReference(ptId2);
obs2.getCode().setText("ZXCVBNMZZ");
myObservationDao.create(obs2, mySrd);

List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("ZXCVBNM", output.get(0).getTerm());
assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm());
assertEquals("ZXC", output.get(2).getTerm());
assertEquals("ZXC HELLO", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("ZXC", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());
assertEquals("ZXCVBNM", output.get(2).getTerm());
assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("HELLO", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z", null);
ourLog.info("Found: " + output);
assertEquals(0, output.size());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("ZXC", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());

}

@Test
public void testSearchAndReindex() {
SearchParameterMap map;

@@ -1165,7 +1165,6 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest {
assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus());
}


Bundle respGetBundle = (Bundle) resp.getEntry().get(0).getResource();
assertEquals(1, respGetBundle.getEntry().size());
assertEquals("testTransactionOrdering" + pass, ((Patient) respGetBundle.getEntry().get(0).getResource()).getNameFirstRep().getFamilyFirstRep().getValue());

@@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao;
import ca.uhn.fhir.jpa.dao.data.IResourceTagDao;
import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao;
import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao;
import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementTargetDao;

@@ -54,8 +55,8 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException;
import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory;
import ca.uhn.fhir.util.UrlUtil;
import org.apache.commons.io.IOUtils;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.dstu3.model.AllergyIntolerance;
import org.hl7.fhir.dstu3.model.Appointment;
import org.hl7.fhir.dstu3.model.AuditEvent;

@@ -319,6 +320,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@Autowired
protected ITermCodeSystemDao myTermCodeSystemDao;
@Autowired
protected ITermCodeSystemVersionDao myTermCodeSystemVersionDao;
@Autowired
protected ITermReadSvc myTermSvc;
@Autowired
protected PlatformTransactionManager myTransactionMgr;

@@ -375,10 +378,10 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest {
@BeforeEach
public void beforeFlushFT() {
runInTransaction(() -> {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);
ftem.flushToIndexes();
SearchSession searchSession = Search.session(myEntityManager);
searchSession.workspace(ResourceTable.class).purge();
// searchSession.workspace(ResourceIndexedSearchParamString.class).purge();
searchSession.indexingPlan().execute();
});

myDaoConfig.setSchedulingDisabled(true);
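The beforeFlushFT() hunk above swaps the removed Hibernate Search 5 FullTextEntityManager purge for the Hibernate Search 6 SearchSession API. A minimal standalone sketch of the same purge-and-flush idiom (the IndexCleaner class name is hypothetical; the Search and SearchSession calls are the ones used in the diff):

import javax.persistence.EntityManager;

import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;

public class IndexCleaner {

	// Removes every indexed document for the given entity type and pushes the
	// pending indexing works to the backend. Must run inside an active transaction.
	public static void purgeIndex(EntityManager theEntityManager, Class<?> theEntityType) {
		SearchSession searchSession = Search.session(theEntityManager);
		searchSession.workspace(theEntityType).purge();
		searchSession.indexingPlan().execute();
	}
}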

@@ -63,10 +63,24 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test {
cs.addConcept().setCode("A");
cs.addConcept().setCode("B");
myCodeSystemDao.update(cs, mySrd);
myTerminologyDeferredStorageSvc.saveAllDeferred();
runInTransaction(()->{
assertEquals(2, myConceptDao.count());
});

// Update the code system to reduce the count again
cs = new CodeSystem();
cs.setId(id);
cs.setUrl("http://foo");
cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
cs.setStatus(Enumerations.PublicationStatus.ACTIVE);
cs.addConcept().setCode("C");
myCodeSystemDao.update(cs, mySrd);
myTerminologyDeferredStorageSvc.saveAllDeferred();
runInTransaction(()->{
assertEquals(1, myConceptDao.count());
});

// Delete the code system
runInTransaction(()->{
myCodeSystemDao.delete(id);

@@ -1,7 +1,6 @@
package ca.uhn.fhir.jpa.dao.dstu3;

import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.StringAndListParam;

@@ -141,116 +140,6 @@ public class FhirResourceDaoDstu3SearchFtTest extends BaseJpaDstu3Test {

}


@Test
public void testSuggestIgnoresBase64Content() {
Patient patient = new Patient();
patient.addName().setFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();

Media med = new Media();
med.getSubject().setReferenceElement(ptId);
med.getSubtype().setText("Systolic Blood Pressure");
med.getContent().setContentType("LCws");
med.getContent().setDataElement(new Base64BinaryType(new byte[] {44,44,44,44,44,44,44,44}));
med.getContent().setTitle("bbbb syst");
myMediaDao.create(med, mockSrd());
ourLog.info(myFhirCtx.newJsonParser().encodeResourceToString(med));

List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "press", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("Pressure", output.get(0).getTerm());
assertEquals("Systolic Blood Pressure", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "prezure", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("Pressure", output.get(0).getTerm());
assertEquals("Systolic Blood Pressure", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "syst", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("syst", output.get(0).getTerm());
assertEquals("bbbb syst", output.get(1).getTerm());
assertEquals("Systolic", output.get(2).getTerm());
assertEquals("Systolic Blood Pressure", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "LCws", null);
ourLog.info("Found: " + output);
assertEquals(0, output.size());
}

@Test
public void testSuggest() {
Patient patient = new Patient();
patient.addName().setFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();

Observation obs = new Observation();
obs.getSubject().setReferenceElement(ptId);
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
myObservationDao.create(obs, mockSrd());

obs = new Observation();
obs.getSubject().setReferenceElement(ptId);
obs.getCode().setText("MNBVCXZ");
myObservationDao.create(obs, mockSrd());

obs = new Observation();
obs.getSubject().setReferenceElement(ptId);
obs.getCode().setText("ZXC HELLO");
obs.addComponent().getCode().setText("HHHHHHHHHH");
myObservationDao.create(obs, mockSrd());

/*
 * These shouldn't match since they're for another patient
 */
patient = new Patient();
patient.addName().setFamily("testSuggest2");
IIdType ptId2 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();

Observation obs2 = new Observation();
obs2.getSubject().setReferenceElement(ptId2);
obs2.getCode().setText("ZXCVBNMZZ");
myObservationDao.create(obs2, mockSrd());

List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("ZXCVBNM", output.get(0).getTerm());
assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm());
assertEquals("ZXC", output.get(2).getTerm());
assertEquals("ZXC HELLO", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("ZXC", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());
assertEquals("ZXCVBNM", output.get(2).getTerm());
assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("HELLO", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z", null);
ourLog.info("Found: " + output);
assertEquals(0, output.size());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("ZXC", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());

}


@Test
public void testSearchAndReindex() {
Patient patient;

@@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.dao.predicate;
import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.util.CoordCalculator;
import ca.uhn.fhir.jpa.util.CoordCalculatorTest;
import ca.uhn.fhir.jpa.util.SearchBox;
import org.hibernate.search.engine.spatial.GeoBoundingBox;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;

@@ -41,8 +41,8 @@ public class PredicateBuilderCoordsTest {

@Test
public void testLongitudePredicateFromBox() {
SearchBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_CHIN, CoordCalculatorTest.LONGITUDE_CHIN, CoordCalculatorTest.DISTANCE_TAVEUNI);
assertThat(box.getNorthEast().getLongitude(), greaterThan(box.getSouthWest().getLongitude()));
GeoBoundingBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_CHIN, CoordCalculatorTest.LONGITUDE_CHIN, CoordCalculatorTest.DISTANCE_TAVEUNI);
assertThat(box.bottomRight().longitude(), greaterThan(box.topLeft().longitude()));

ArgumentCaptor<Predicate> andLeft = ArgumentCaptor.forClass(Predicate.class);
ArgumentCaptor<Predicate> andRight = ArgumentCaptor.forClass(Predicate.class);

@@ -58,15 +58,15 @@ public class PredicateBuilderCoordsTest {
verify(myBuilder).and(andLeft.capture(), andRight.capture());
assertEquals(andLeft.getValue(), gte);
assertEquals(andRight.getValue(), lte);
assertEquals(gteValue.getValue(), box.getSouthWest().getLongitude());
assertEquals(lteValue.getValue(), box.getNorthEast().getLongitude());
assertEquals(gteValue.getValue(), box.topLeft().longitude());
assertEquals(lteValue.getValue(), box.bottomRight().longitude());
}

@Test
public void testAntiMeridianLongitudePredicateFromBox() {
SearchBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_TAVEUNI, CoordCalculatorTest.LONGITIDE_TAVEUNI, CoordCalculatorTest.DISTANCE_TAVEUNI);
assertThat(box.getNorthEast().getLongitude(), lessThan(box.getSouthWest().getLongitude()));
assertTrue(box.crossesAntiMeridian());
GeoBoundingBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_TAVEUNI, CoordCalculatorTest.LONGITIDE_TAVEUNI, CoordCalculatorTest.DISTANCE_TAVEUNI);
assertThat(box.bottomRight().longitude(), lessThan(box.topLeft().longitude()));
assertTrue(box.bottomRight().longitude() < box.topLeft().longitude());

ArgumentCaptor<Predicate> orLeft = ArgumentCaptor.forClass(Predicate.class);
ArgumentCaptor<Predicate> orRight = ArgumentCaptor.forClass(Predicate.class);

@@ -82,8 +82,8 @@ public class PredicateBuilderCoordsTest {
verify(myBuilder).or(orLeft.capture(), orRight.capture());
assertEquals(orLeft.getValue(), gte);
assertEquals(orRight.getValue(), lte);
assertEquals(gteValue.getValue(), box.getNorthEast().getLongitude());
assertEquals(lteValue.getValue(), box.getSouthWest().getLongitude());
assertEquals(gteValue.getValue(), box.bottomRight().longitude());
assertEquals(lteValue.getValue(), box.topLeft().longitude());
}

}
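The coords test migration above swaps the project's old SearchBox (north-east/south-west corners) for Hibernate Search 6's GeoBoundingBox, whose corners are topLeft() and bottomRight(); a box crossing the anti-meridian shows up as a bottom-right longitude smaller than the top-left one. A small sketch under that assumption (the class name and coordinates are invented):

import org.hibernate.search.engine.spatial.GeoBoundingBox;
import org.hibernate.search.engine.spatial.GeoPoint;

public class BoxExample {
	public static void main(String[] args) {
		// A box straddling the 180-degree meridian near Taveuni: the bottom-right
		// corner ends up with a smaller longitude than the top-left corner.
		GeoBoundingBox box = GeoBoundingBox.of(
			GeoPoint.of(-16.7, 179.8),    // top-left corner
			GeoPoint.of(-17.1, -179.9));  // bottom-right corner
		boolean crossesAntiMeridian = box.bottomRight().longitude() < box.topLeft().longitude();
		System.out.println("crosses anti-meridian: " + crossesAntiMeridian);
	}
}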

@@ -54,6 +54,9 @@ import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.entity.TermCodeSystem;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation;
import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
import ca.uhn.fhir.jpa.model.config.PartitionSettings;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;

@@ -68,6 +71,7 @@ import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc;

@@ -87,8 +91,8 @@ import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import org.apache.commons.io.IOUtils;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;

@@ -159,6 +163,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
import org.springframework.data.domain.Pageable;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.util.AopTestUtils;

@@ -170,11 +175,14 @@ import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.empty;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;


@@ -524,10 +532,10 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
@BeforeEach
public void beforeFlushFT() {
runInTransaction(() -> {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);
ftem.flushToIndexes();
SearchSession searchSession = Search.session(myEntityManager);
searchSession.workspace(ResourceTable.class).purge();
// searchSession.workspace(ResourceIndexedSearchParamString.class).purge();
searchSession.indexingPlan().execute();
});

myDaoConfig.setSchedulingDisabled(true);

@@ -751,4 +759,68 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil
return uuid;
}


protected ValueSet.ConceptReferenceDesignationComponent assertConceptContainsDesignation(ValueSet.ValueSetExpansionContainsComponent theConcept, String theLanguage, String theUseSystem, String theUseCode, String theUseDisplay, String theDesignationValue) {
Stream<ValueSet.ConceptReferenceDesignationComponent> stream = theConcept.getDesignation().stream();
if (theLanguage != null) {
stream = stream.filter(designation -> theLanguage.equalsIgnoreCase(designation.getLanguage()));
}
if (theUseSystem != null) {
stream = stream.filter(designation -> theUseSystem.equalsIgnoreCase(designation.getUse().getSystem()));
}
if (theUseCode != null) {
stream = stream.filter(designation -> theUseCode.equalsIgnoreCase(designation.getUse().getCode()));
}
if (theUseDisplay != null) {
stream = stream.filter(designation -> theUseDisplay.equalsIgnoreCase(designation.getUse().getDisplay()));
}
if (theDesignationValue != null) {
stream = stream.filter(designation -> theDesignationValue.equalsIgnoreCase(designation.getValue()));
}

Optional<ValueSet.ConceptReferenceDesignationComponent> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Concept %s did not contain designation [%s|%s|%s|%s|%s] ", theConcept.toString(), theLanguage, theUseSystem, theUseCode, theUseDisplay, theDesignationValue);
fail(failureMessage);
return null;
} else {
return first.get();
}
}

protected ValueSet.ValueSetExpansionContainsComponent assertExpandedValueSetContainsConcept(ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) {
List<ValueSet.ValueSetExpansionContainsComponent> contains = theValueSet.getExpansion().getContains();

Stream<ValueSet.ValueSetExpansionContainsComponent> stream = contains.stream();
if (theSystem != null) {
stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem()));
}
if (theCode != null) {
stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode()));
}
if (theDisplay != null) {
stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay()));
}
if (theDesignationCount != null) {
stream = stream.filter(concept -> concept.getDesignation().size() == theDesignationCount);
}

Optional<ValueSet.ValueSetExpansionContainsComponent> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount);
fail(failureMessage);
return null;
} else {
return first.get();
}
}
public List<String> getExpandedConceptsByValueSetUrl(String theValuesetUrl) {
return runInTransaction(() -> {
List<TermValueSet> valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl);
assertEquals(1, valueSets.size());
TermValueSet valueSet = valueSets.get(0);
List<TermValueSetConcept> concepts = valueSet.getConcepts();
return concepts.stream().map(concept -> concept.getCode()).collect(Collectors.toList());
});
}
}
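The new BaseJpaR4Test helpers above assert on a ValueSet expansion by filtering it with the Streams API instead of hard-coding list positions. A hypothetical call sketch (the "expanded" variable and the concrete codes are placeholders; the helper signatures are the ones added in the diff):

// In a test extending BaseJpaR4Test; "expanded" is a ValueSet returned by $expand.
ValueSet.ValueSetExpansionContainsComponent concept =
	assertExpandedValueSetContainsConcept(expanded, "http://acme.org", "8450-9", null, null);
assertConceptContainsDesignation(concept, "nl", "http://snomed.info/sct",
	"900000000000013009", "Synonym", "Systolische bloeddruk - expiratie");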

@@ -59,7 +59,7 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {
// Now the background scheduler will do its thing
myTerminologyDeferredStorageSvc.saveDeferred();
runInTransaction(() -> {
assertEquals(1, myTermCodeSystemDao.count());
assertEquals(0, myTermCodeSystemDao.count());
assertEquals(0, myTermCodeSystemVersionDao.count());
assertEquals(0, myTermConceptDao.count());
});

@@ -153,7 +153,7 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test {

// The remaining versions and Code System entities should be gone now.
runInTransaction(() -> {
assertEquals(1, myTermCodeSystemDao.count());
assertEquals(0, myTermCodeSystemDao.count());
assertNull(myTermCodeSystemDao.findByCodeSystemUri("http://foo"));
assertEquals(0, myTermCodeSystemVersionDao.count());
List<ResourceTable> resourceList = myResourceTableDao.findAll();

@@ -47,7 +47,6 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test {
runInTransaction(() -> {
ResourceTable resourceTable = myResourceTableDao.findById(id.getIdPartAsLong()).get();
assertNotNull(resourceTable.getDeleted());
assertTrue(resourceTable.isDeleted());
});

// Current version should be marked as deleted

@@ -1,16 +1,13 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
import ca.uhn.fhir.util.TestUtil;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.*;
import org.hl7.fhir.r4.model.Observation.ObservationStatus;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;

@@ -122,129 +119,6 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test {

}

@Test
public void testSuggestIgnoresBase64Content() {
Patient patient = new Patient();
patient.addName().setFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();

// Attached to patient
Observation obs1 = new Observation();
obs1.setSubject(new Reference(ptId));
obs1.getCode().setText("AAAAA");
obs1.setValue(new StringType("Systolic Blood Pressure"));
obs1.setStatus(ObservationStatus.FINAL);
myObservationDao.create(obs1, mockSrd()).getId().toUnqualifiedVersionless();

// Not attached to patient
Observation obs2 = new Observation();
obs2.getCode().setText("AAAAA");
obs2.setValue(new StringType("Diastolic Blood Pressure"));
obs2.setStatus(ObservationStatus.FINAL);
myObservationDao.create(obs2, mockSrd()).getId().toUnqualifiedVersionless();


Media med = new Media();
med.getSubject().setReferenceElement(ptId);
med.getContent().setContentType("LCws");
med.getContent().setDataElement(new Base64BinaryType(new byte[]{44, 44, 44, 44, 44, 44, 44, 44}));
med.getContent().setTitle("bbbb syst");
myMediaDao.create(med, mockSrd());
ourLog.info(myFhirCtx.newJsonParser().encodeResourceToString(med));

List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "press", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("Pressure", output.get(0).getTerm());
assertEquals("Systolic Blood Pressure", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "prezure", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("Pressure", output.get(0).getTerm());
assertEquals("Systolic Blood Pressure", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "syst", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("syst", output.get(0).getTerm());
assertEquals("bbbb syst", output.get(1).getTerm());
assertEquals("Systolic", output.get(2).getTerm());
assertEquals("Systolic Blood Pressure", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "LCws", null);
ourLog.info("Found: " + output);
assertEquals(0, output.size());
}

@Test
public void testSuggest() {
Patient patient = new Patient();
patient.addName().setFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();

Observation obs = new Observation();
obs.getSubject().setReferenceElement(ptId);
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
myObservationDao.create(obs, mockSrd());

obs = new Observation();
obs.getSubject().setReferenceElement(ptId);
obs.getCode().setText("MNBVCXZ");
myObservationDao.create(obs, mockSrd());

obs = new Observation();
obs.getSubject().setReferenceElement(ptId);
obs.getCode().setText("ZXC HELLO");
obs.addComponent().getCode().setText("HHHHHHHHHH");
myObservationDao.create(obs, mockSrd());

/*
 * These shouldn't match since they're for another patient
 */
patient = new Patient();
patient.addName().setFamily("testSuggest2");
IIdType ptId2 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless();

Observation obs2 = new Observation();
obs2.getSubject().setReferenceElement(ptId2);
obs2.getCode().setText("ZXCVBNMZZ");
myObservationDao.create(obs2, mockSrd());

List<Suggestion> output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("ZXCVBNM", output.get(0).getTerm());
assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm());
assertEquals("ZXC", output.get(2).getTerm());
assertEquals("ZXC HELLO", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC", null);
ourLog.info("Found: " + output);
assertEquals(4, output.size());
assertEquals("ZXC", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());
assertEquals("ZXCVBNM", output.get(2).getTerm());
assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("HELLO", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z", null);
ourLog.info("Found: " + output);
assertEquals(0, output.size());

output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX", null);
ourLog.info("Found: " + output);
assertEquals(2, output.size());
assertEquals("ZXC", output.get(0).getTerm());
assertEquals("ZXC HELLO", output.get(1).getTerm());

}

@Test
public void testSearchAndReindex() {
Patient patient;

@@ -1,6 +1,5 @@
package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.dao.LegacySearchBuilder;
import ca.uhn.fhir.jpa.search.builder.SearchBuilder;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.ReferenceParam;

@@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.dao.JpaResourceDao;
import ca.uhn.fhir.jpa.entity.Search;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum;
import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;

@@ -50,6 +50,8 @@ import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hamcrest.Matchers;
import org.hamcrest.core.StringContains;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IIdType;

@@ -125,6 +127,7 @@ import java.util.concurrent.Future;
import static org.apache.commons.lang3.StringUtils.countMatches;
import static org.apache.commons.lang3.StringUtils.defaultString;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;

@@ -286,8 +289,42 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXED, tableOpt.get().getIndexStatus().longValue());
assertThat(myResourceIndexedSearchParamTokenDao.countForResourceId(id1.getIdPartAsLong()), not(greaterThan(0)));
});
}

@Test
public void testTermConceptReindexingDoesntDuplicateData() {
myDaoConfig.setSchedulingDisabled(true);


CodeSystem cs = new CodeSystem();
cs.setId("nhin-use");
cs.setUrl("http://zoop.com");
cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE);
cs.addConcept().setCode("zoop1").setDisplay("zoop_disp1").setDefinition("zoop_defi1");
cs.addConcept().setCode("zoop2").setDisplay("zoop_disp2").setDefinition("zoop_defi2");
cs.addConcept().setCode("zoop3").setDisplay("zoop_disp3").setDefinition("zoop_defi3");

IIdType id1 = myCodeSystemDao.create(cs).getId().toUnqualifiedVersionless();

runInTransaction(() -> {
assertEquals(3L, myTermConceptDao.count());

SearchSession session = Search.session(myEntityManager);
List<TermConcept> termConcepts = session.search(TermConcept.class).where(f -> f.matchAll()).fetchAllHits();
assertEquals(3, termConcepts.size());
});

myResourceReindexingSvc.markAllResourcesForReindexing();
myResourceReindexingSvc.forceReindexingPass();
myTerminologyDeferredStorageSvc.saveAllDeferred();

runInTransaction(() -> {
assertEquals(3L, myTermConceptDao.count());

SearchSession session = Search.session(myEntityManager);
List<TermConcept> termConcepts = session.search(TermConcept.class).where(f -> f.matchAll()).fetchAllHits();
assertEquals(3, termConcepts.size());
});
}

@Test
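The testTermConceptReindexingDoesntDuplicateData() test above counts indexed documents with the Hibernate Search 6 query DSL. A minimal sketch of that count-all query (the helper class name is hypothetical; the session acquisition and search calls match the test):

import java.util.List;

import javax.persistence.EntityManager;

import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;

public class IndexCountExample {

	// Fetches every indexed document of the given type; matchAll() selects all
	// documents in the index for it. Must run inside an active transaction.
	public static <T> List<T> fetchAllIndexed(EntityManager theEntityManager, Class<T> theType) {
		SearchSession session = Search.session(theEntityManager);
		return session.search(theType).where(f -> f.matchAll()).fetchAllHits();
	}
}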

@@ -3948,7 +3985,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test {
String uuid = UUID.randomUUID().toString();

runInTransaction(() -> {
Search search = new Search();
ca.uhn.fhir.jpa.entity.Search search = new ca.uhn.fhir.jpa.entity.Search();
SearchCoordinatorSvcImpl.populateSearchEntity(map, "Encounter", uuid, normalized, search, RequestPartitionId.allPartitions());
search.setStatus(SearchStatusEnum.FAILED);
search.setFailureCode(500);

@@ -45,6 +45,8 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao;
import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao;
import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor;
import ca.uhn.fhir.jpa.model.entity.ModelConfig;
import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString;

@@ -58,6 +60,7 @@ import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl;
import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry;
import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl;
import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl;
import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc;
import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5;

@@ -73,8 +76,8 @@ import ca.uhn.fhir.util.UrlUtil;
import ca.uhn.fhir.validation.FhirValidator;
import ca.uhn.fhir.validation.ValidationResult;
import org.apache.commons.io.IOUtils;
import org.hibernate.search.jpa.FullTextEntityManager;
import org.hibernate.search.jpa.Search;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.AllergyIntolerance;

@@ -137,6 +140,7 @@ import org.junit.jupiter.api.extension.ExtendWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.ApplicationContext;
import org.springframework.data.domain.Pageable;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;
import org.springframework.test.util.AopTestUtils;

@@ -149,7 +153,11 @@ import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;


@@ -454,10 +462,10 @@ public abstract class BaseJpaR5Test extends BaseJpaTest {
@BeforeEach
public void beforeFlushFT() {
runInTransaction(() -> {
FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager);
ftem.purgeAll(ResourceTable.class);
ftem.purgeAll(ResourceIndexedSearchParamString.class);
ftem.flushToIndexes();
SearchSession searchSession = Search.session(myEntityManager);
searchSession.workspace(ResourceTable.class).purge();
// searchSession.workspace(ResourceIndexedSearchParamString.class).purge();
searchSession.indexingPlan().execute();
});

myDaoConfig.setSchedulingDisabled(true);

@@ -626,5 +634,40 @@ public abstract class BaseJpaR5Test extends BaseJpaTest {
String[] uuidParams = params.get(Constants.PARAM_PAGINGACTION);
return uuidParams[0];
}
protected ValueSet.ValueSetExpansionContainsComponent assertExpandedValueSetContainsConcept(ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) {
List<ValueSet.ValueSetExpansionContainsComponent> contains = theValueSet.getExpansion().getContains();

Stream<ValueSet.ValueSetExpansionContainsComponent> stream = contains.stream();
if (theSystem != null) {
stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem()));
}
if (theCode != null) {
stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode()));
}
if (theDisplay != null) {
stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay()));
}
if (theDesignationCount != null) {
stream = stream.filter(concept -> concept.getDesignation().size() == theDesignationCount);
}

Optional<ValueSet.ValueSetExpansionContainsComponent> first = stream.findFirst();
if (!first.isPresent()) {
String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount);
fail(failureMessage);
return null;
} else {
return first.get();
}
}
public List<String> getExpandedConceptsByValueSetUrl(String theValuesetUrl) {
return runInTransaction(() -> {
List<TermValueSet> valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl);
assertEquals(1, valueSets.size());
TermValueSet valueSet = valueSets.get(0);
List<TermValueSetConcept> concepts = valueSet.getConcepts();
return concepts.stream().map(concept -> concept.getCode()).collect(Collectors.toList());
});
}

}

@@ -43,7 +43,7 @@ public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test {
// Now the background scheduler will do its thing
myTermDeferredStorageSvc.saveDeferred();
runInTransaction(() -> {
assertEquals(1, myTermCodeSystemDao.count());
assertEquals(0, myTermCodeSystemDao.count());
assertEquals(0, myTermCodeSystemVersionDao.count());
assertEquals(0, myTermConceptDao.count());
});

@@ -137,7 +137,7 @@ public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test {

// The remaining versions and Code System entities should be gone now.
runInTransaction(() -> {
assertEquals(1, myTermCodeSystemDao.count());
assertEquals(0, myTermCodeSystemDao.count());
assertNull(myTermCodeSystemDao.findByCodeSystemUri("http://foo"));
assertEquals(0, myTermCodeSystemVersionDao.count());
List<ResourceTable> resourceList = myResourceTableDao.findAll();

@@ -4,6 +4,7 @@ import ca.uhn.fhir.jpa.config.BaseConfig;
import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test;
import ca.uhn.fhir.rest.api.PatchTypeEnum;
import ca.uhn.fhir.rest.server.exceptions.PreconditionFailedException;
import ca.uhn.fhir.validation.ResultSeverityEnum;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CanonicalType;
import org.hl7.fhir.r4.model.CodeType;

@@ -29,6 +30,7 @@ import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

public class RepositoryValidatingInterceptorR4Test extends BaseJpaR4Test {

@@ -262,12 +264,34 @@ public class RepositoryValidatingInterceptorR4Test extends BaseJpaR4Test {
}
}

@Test
public void testRequireValidation_FailNoRejectAndTag() {
List<IRepositoryValidatingRule> rules = newRuleBuilder()
.forResourcesOfType("Observation")
.requireValidationToDeclaredProfiles()
.withBestPracticeWarningLevel("IGNORE")
.dontReject()
.tagOnSeverity(ResultSeverityEnum.ERROR, "http://foo", "validation-error")
.build();
myValInterceptor.setRules(rules);

Observation obs = new Observation();
obs.getCode().addCoding().setSystem("http://foo").setCode("123").setDisplay("help im a bug");
IIdType id = myObservationDao.create(obs).getId();
assertEquals("1", id.getVersionIdPart());

obs = myObservationDao.read(id);
assertTrue(obs.getMeta().hasTag());
assertTrue(obs.getMeta().getTag("http://foo", "validation-error") != null);
}

@Test
public void testRequireValidation_Blocked() {
List<IRepositoryValidatingRule> rules = newRuleBuilder()
.forResourcesOfType("Observation")
.requireValidationToDeclaredProfiles()
.withBestPracticeWarningLevel("IGNORE")
.rejectOnSeverity("error")
.build();
myValInterceptor.setRules(rules);

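The two new interceptor tests above drive the repository-validation rule builder: chaining dontReject() with tagOnSeverity() stores a failing resource but marks it, while rejectOnSeverity() blocks the write outright. A condensed sketch of the tagging variant (names as in the diff, otherwise a hedged illustration, not the full test):

List<IRepositoryValidatingRule> rules = newRuleBuilder()
	.forResourcesOfType("Observation")
	.requireValidationToDeclaredProfiles()
	.withBestPracticeWarningLevel("IGNORE")
	.dontReject() // store the resource even when validation fails...
	.tagOnSeverity(ResultSeverityEnum.ERROR, "http://foo", "validation-error") // ...but tag it
	.build();
myValInterceptor.setRules(rules);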

@@ -194,99 +194,6 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test {
}
}

@Transactional(propagation = Propagation.NEVER)
@Test
public void testSuggestKeywords() throws Exception {

Patient patient = new Patient();
patient.addName().addFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();

Observation obs = new Observation();
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
obs.getSubject().setReference(ptId);
IIdType obsId = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless();

obs = new Observation();
obs.setId(obsId);
obs.getSubject().setReference(ptId);
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
myObservationDao.update(obs, mySrd);

// Try to wait for the indexing to complete
waitForSize(2, ()-> fetchSuggestionCount(ptId));

HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml");
try (CloseableHttpResponse http = ourHttpClient.execute(get)) {
assertEquals(200, http.getStatusLine().getStatusCode());
String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(output);

Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output);
assertEquals(2, parameters.getParameter().size());
assertEquals("keyword", parameters.getParameter().get(0).getPart().get(0).getName());
assertEquals(new StringDt("ZXCVBNM"), parameters.getParameter().get(0).getPart().get(0).getValue());
assertEquals("score", parameters.getParameter().get(0).getPart().get(1).getName());
assertEquals(new DecimalDt("1.0"), parameters.getParameter().get(0).getPart().get(1).getValue());

}
}

private Number fetchSuggestionCount(IIdType thePtId) throws IOException {
HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + thePtId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml");
try (CloseableHttpResponse http = ourHttpClient.execute(get)) {
assertEquals(200, http.getStatusLine().getStatusCode());
String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8);
Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output);
return parameters.getParameter().size();
}
}

@Test
public void testSuggestKeywordsInvalid() throws Exception {
Patient patient = new Patient();
patient.addName().addFamily("testSuggest");
IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless();

Observation obs = new Observation();
obs.getSubject().setReference(ptId);
obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL");
myObservationDao.create(obs, mySrd);

HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords");
CloseableHttpResponse http = ourHttpClient.execute(get);
try {
assertEquals(400, http.getStatusLine().getStatusCode());
String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(output);
assertThat(output, containsString("Parameter 'context' must be provided"));
} finally {
http.close();
}

get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything");
http = ourHttpClient.execute(get);
try {
assertEquals(400, http.getStatusLine().getStatusCode());
String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(output);
assertThat(output, containsString("Parameter 'searchParam' must be provided"));
} finally {
http.close();
}

get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=aa");
http = ourHttpClient.execute(get);
try {
assertEquals(400, http.getStatusLine().getStatusCode());
String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8);
ourLog.info(output);
assertThat(output, containsString("Parameter 'text' must be provided"));
} finally {
http.close();
}

}

@Test
public void testGetOperationDefinition() {

@@ -14,6 +14,7 @@ import org.junit.jupiter.api.Test;
import org.springframework.transaction.annotation.Transactional;

import java.io.IOException;
import java.util.stream.Collectors;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.fail;

@@ -35,7 +36,11 @@ public class ResourceProviderDstu3CodeSystemTest extends BaseResourceProviderDst

@Test
public void testLookupOnExternalCode() {
ResourceProviderDstu3ValueSetTest.createExternalCs(myCodeSystemDao, myResourceTableDao, myTermCodeSystemStorageSvc, mySrd);
ResourceProviderDstu3ValueSetTest.createExternalCs(myCodeSystemDao, myResourceTableDao, myTermCodeSystemStorageSvc, mySrd, myCaptureQueriesListener);

runInTransaction(()->{
ourLog.info("Code system versions:\n * " + myTermCodeSystemVersionDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * ")));
});

Parameters respParam = ourClient
.operation()

@@ -7,6 +7,8 @@ import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest;
import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion;
import ca.uhn.fhir.jpa.entity.TermConcept;
import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum;
import ca.uhn.fhir.jpa.util.BaseCaptureQueriesListener;
import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener;
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc;

@@ -949,6 +951,11 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3


public static CodeSystem createExternalCs(IFhirResourceDao<CodeSystem> theCodeSystemDao, IResourceTableDao theResourceTableDao, ITermCodeSystemStorageSvc theTermCodeSystemStorageSvc, ServletRequestDetails theRequestDetails) {
return createExternalCs(theCodeSystemDao, theResourceTableDao, theTermCodeSystemStorageSvc, theRequestDetails, null);
}

@Nonnull
public static CodeSystem createExternalCs(IFhirResourceDao<CodeSystem> theCodeSystemDao, IResourceTableDao theResourceTableDao, ITermCodeSystemStorageSvc theTermCodeSystemStorageSvc, ServletRequestDetails theRequestDetails, CircularQueueCaptureQueriesListener theCaptureQueriesListener) {
CodeSystem codeSystem = new CodeSystem();
codeSystem.setUrl(URL_MY_CODE_SYSTEM);
codeSystem.setContent(CodeSystemContentMode.NOTPRESENT);

@@ -979,7 +986,15 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3
TermConcept parentB = new TermConcept(cs, "ParentB").setDisplay("Parent B");
cs.getConcepts().add(parentB);


ourLog.info("About to update CodeSystem");
if (theCaptureQueriesListener != null) {
theCaptureQueriesListener.clear();
}
theTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION", cs, table);
if (theCaptureQueriesListener != null) {
theCaptureQueriesListener.logAllQueries();
}
return codeSystem;
}

|
|
@ -50,6 +50,8 @@ import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_
|
|||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.containsString;
|
||||
import static org.hamcrest.Matchers.containsStringIgnoringCase;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.stringContainsInOrder;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
@ -1110,68 +1112,24 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
|
|||
assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size());
|
||||
assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());
|
||||
|
||||
TermValueSetConcept concept = termValueSet.getConcepts().get(0);
|
||||
ourLog.info("Concept:\n" + concept.toString());
|
||||
assertEquals("http://acme.org", concept.getSystem());
|
||||
assertEquals("1", concept.getSystemVersion());
|
||||
assertEquals("8450-9", concept.getCode());
|
||||
assertEquals("Systolic blood pressure--expiration", concept.getDisplay());
|
||||
assertEquals(2, concept.getDesignations().size());
|
||||
assertEquals(0, concept.getOrder());
|
||||
TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2);
|
||||
assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie");
|
||||
assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång");
|
||||
|
||||
TermValueSetConceptDesignation designation = concept.getDesignations().get(0);
|
||||
assertEquals("nl", designation.getLanguage());
|
||||
assertEquals("http://snomed.info/sct", designation.getUseSystem());
|
||||
assertEquals("900000000000013009", designation.getUseCode());
|
||||
assertEquals("Synonym", designation.getUseDisplay());
|
||||
assertEquals("Systolische bloeddruk - expiratie", designation.getValue());
|
||||
|
||||
designation = concept.getDesignations().get(1);
|
||||
assertEquals("sv", designation.getLanguage());
|
||||
assertEquals("http://snomed.info/sct", designation.getUseSystem());
|
||||
assertEquals("900000000000013009", designation.getUseCode());
|
||||
assertEquals("Synonym", designation.getUseDisplay());
|
||||
assertEquals("Systoliskt blodtryck - utgång", designation.getValue());
|
||||
|
||||
concept = termValueSet.getConcepts().get(1);
|
||||
ourLog.info("Concept:\n" + concept.toString());
|
||||
assertEquals("http://acme.org", concept.getSystem());
|
||||
assertEquals("1", concept.getSystemVersion());
|
||||
assertEquals("11378-7", concept.getCode());
|
||||
assertEquals("Systolic blood pressure at First encounter", concept.getDisplay());
|
||||
assertEquals(0, concept.getDesignations().size());
|
||||
assertEquals(1, concept.getOrder());
|
||||
assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0);
|
||||
|
||||
// ...
|
||||
|
||||
concept = termValueSet.getConcepts().get(22);
|
||||
ourLog.info("Concept:\n" + concept.toString());
|
||||
assertEquals("http://acme.org", concept.getSystem());
|
||||
assertEquals("1", concept.getSystemVersion());
|
||||
assertEquals("8491-3", concept.getCode());
|
||||
assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay());
|
||||
assertEquals(1, concept.getDesignations().size());
|
||||
assertEquals(22, concept.getOrder());
|
||||
TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1);
|
||||
assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur");
|
||||
|
||||
designation = concept.getDesignations().get(0);
|
||||
assertEquals("nl", designation.getLanguage());
|
||||
assertEquals("http://snomed.info/sct", designation.getUseSystem());
|
||||
assertEquals("900000000000013009", designation.getUseCode());
|
||||
assertEquals("Synonym", designation.getUseDisplay());
|
||||
assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue());
|
||||
assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0);
|
||||
|
||||
concept = termValueSet.getConcepts().get(23);
|
||||
ourLog.info("Concept:\n" + concept.toString());
|
||||
assertEquals("http://acme.org", concept.getSystem());
|
||||
assertEquals("1", concept.getSystemVersion());
|
||||
assertEquals("8492-1", concept.getCode());
|
||||
assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay());
|
||||
assertEquals(0, concept.getDesignations().size());
|
||||
assertEquals(23, concept.getOrder());
|
||||
});
|
||||
}
|
||||
|
||||
private void validateTermValueSetExpandedAndChildrenV2(String theValueSetName, CodeSystem theCodeSystem) {
|
||||
|
||||
runInTransaction(() -> {
|
||||
Optional<TermValueSet> optionalValueSetByResourcePid = myTermValueSetDao.findByResourcePid(myExtensionalVsIdOnResourceTable_v2);
|
||||
assertTrue(optionalValueSetByResourcePid.isPresent());
|
||||
|
@ -1187,64 +1145,22 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv
|
|||
        assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size());
        assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus());

        TermValueSetConcept concept = termValueSet.getConcepts().get(0);
        ourLog.info("Concept:\n" + concept.toString());
        assertEquals("http://acme.org", concept.getSystem());
        assertEquals("2", concept.getSystemVersion());
        assertEquals("8450-9", concept.getCode());
        assertEquals("Systolic blood pressure--expiration v2", concept.getDisplay());
        assertEquals(2, concept.getDesignations().size());
        assertEquals(0, concept.getOrder());
        TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration v2", 2);
        assertThat(concept.getSystemVersion(), is(equalTo("2")));
        assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie");
        assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång");

        TermValueSetConceptDesignation designation = concept.getDesignations().get(0);
        assertEquals("nl", designation.getLanguage());
        assertEquals("http://snomed.info/sct", designation.getUseSystem());
        assertEquals("900000000000013009", designation.getUseCode());
        assertEquals("Synonym", designation.getUseDisplay());
        assertEquals("Systolische bloeddruk - expiratie", designation.getValue());

        designation = concept.getDesignations().get(1);
        assertEquals("sv", designation.getLanguage());
        assertEquals("http://snomed.info/sct", designation.getUseSystem());
        assertEquals("900000000000013009", designation.getUseCode());
        assertEquals("Synonym", designation.getUseDisplay());
        assertEquals("Systoliskt blodtryck - utgång", designation.getValue());

        concept = termValueSet.getConcepts().get(1);
        ourLog.info("Concept:\n" + concept.toString());
        assertEquals("http://acme.org", concept.getSystem());
        assertEquals("2", concept.getSystemVersion());
        assertEquals("11378-7", concept.getCode());
        assertEquals("Systolic blood pressure at First encounter v2", concept.getDisplay());
        assertEquals(0, concept.getDesignations().size());
        assertEquals(1, concept.getOrder());
        TermValueSetConcept termValueSetConcept1 = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter v2", 0);
        assertThat(termValueSetConcept1.getSystemVersion(), is(equalTo("2")));

        // ...

        concept = termValueSet.getConcepts().get(22);
        ourLog.info("Concept:\n" + concept.toString());
        assertEquals("http://acme.org", concept.getSystem());
        assertEquals("2", concept.getSystemVersion());
        assertEquals("8491-3", concept.getCode());
        assertEquals("Systolic blood pressure 1 hour minimum v2", concept.getDisplay());
        assertEquals(1, concept.getDesignations().size());
        assertEquals(22, concept.getOrder());
        TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum v2", 1);
        assertThat(otherConcept.getSystemVersion(), is(equalTo("2")));
        assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur");

        designation = concept.getDesignations().get(0);
        assertEquals("nl", designation.getLanguage());
        assertEquals("http://snomed.info/sct", designation.getUseSystem());
        assertEquals("900000000000013009", designation.getUseCode());
        assertEquals("Synonym", designation.getUseDisplay());
        assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue());

        concept = termValueSet.getConcepts().get(23);
        ourLog.info("Concept:\n" + concept.toString());
        assertEquals("http://acme.org", concept.getSystem());
        assertEquals("2", concept.getSystemVersion());
        assertEquals("8492-1", concept.getCode());
        assertEquals("Systolic blood pressure 8 hour minimum v2", concept.getDisplay());
        assertEquals(0, concept.getDesignations().size());
        assertEquals(23, concept.getOrder());
        TermValueSetConcept termValueSetConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum v2", 0);
        assertThat(termValueSetConcept.getSystemVersion(), is(equalTo("2")));
    });
}
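
Both validators now delegate to assertTermValueSetContainsConceptAndIsInDeclaredOrder, which looks a concept up by system and code, checks its display, designation count, and declared order, and returns it so call sites can chain further checks such as the systemVersion assertions above. The real helper is outside this diff; below is a minimal sketch under those assumptions, with the order semantics and failure message guessed from the call sites.

    // Hypothetical sketch, not the real helper. Assumes java.util.List, the TermValueSet
    // entities, and static JUnit assertEquals/fail imports are in scope in the test class.
    private static TermValueSetConcept assertTermValueSetContainsConceptAndIsInDeclaredOrder(TermValueSet theValueSet,
            String theSystem, String theCode, String theDisplay, int theDesignationCount) {
        List<TermValueSetConcept> concepts = theValueSet.getConcepts();
        for (int i = 0; i < concepts.size(); i++) {
            TermValueSetConcept candidate = concepts.get(i);
            if (theSystem.equals(candidate.getSystem()) && theCode.equals(candidate.getCode())) {
                assertEquals(theDisplay, candidate.getDisplay());
                // The last parameter is read here as the expected designation count,
                // matching the values passed at the call sites above.
                assertEquals(theDesignationCount, candidate.getDesignations().size());
                // "Declared order": the concept's persisted order must equal its list position.
                assertEquals(i, candidate.getOrder());
                return candidate;
            }
        }
        fail("ValueSet does not contain concept [" + theSystem + "|" + theCode + "]");
        return null; // unreachable
    }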
Some files were not shown because too many files have changed in this diff.