From efc6809ea689eb95436174f24ec498ecf77fb981 Mon Sep 17 00:00:00 2001 From: Tadgh Date: Tue, 5 Jan 2021 17:56:59 -0500 Subject: [PATCH] Replacing Hibernate Search 5 with Hibernate Search 6 (#2190) * Beginning work on converting to hibernate search 6 * replace all out of date properties * replace all POM references to old versions * remove the shaded ES jars as HS6 supports 7.9 * Convert "Bridges" to RoutingBinder and RoutingBridge * Modernize all indexing annotations * Begin refactoring of search queries (wip) * Fix spatial API changes, work on BaseTermReadSvcImpl * Most of the way through the various filter property conversions * Finished i think with BaseTermReadSvcImpl * Compiling, but definitely broken * Start test compilation failures * All tests compiling (i think) * remove suggest keywords operation * Fix bootstrap errors, still have to deal with transient fields * Rollback CR * More refactoring, got IDs baked back into docs * Fix coord util, fix a few more fields which require projection * merge fixes * begin refactor of valuesetexpansion tests * fix another test * Update tests * remove todos, add TestContainers * Fix descendant in * Add testcontainers, fix another test * fix more tests, replace embedded es * Rip out embedded elastic, replace with TestContainer * merge conflicts for new analyzers * Bump to CR2 for hibernate search 6. Add forgotten CLI dep. Add backwards compatible codecs * Remove dead deps, update testcontainer * Remove java-hamcrest * Pull asserts up, refactor tests to pass in RP tests * Update V2 check * Remove suggest keyword tests * Split line for testing * Fix perhaps one of the dumbest programming mistakes in my career so far * Refactor tests which no longer rely on a strict known order * Fix up test config, disable log test temporarily * Remove log4j from being transitively pulled in. re-enable test * Update log4j exclusions with new bridge. Update test to check for core class * Move dependency to root pom in dep management section * Update changelog, add changelog for HS6 with ES requirements. Remove dead comment * update testst, remove todos * Add hibernate props provider method, add elastic regexp query * Add todo for high level client builder * Modify ElasticsearchRestClientFactory to support HTTPS hosts * Remove protocol extraction from rest url * do we even need to index this...? * remove purge of non-indexed resources * Add longer timeout to testcontainers * Add task to add docker CLI to azure pipeline via task, for testcontainers * WIP remove this * Dont update version * Add test for duplicate termconcepts * Use real hibernate ORM methods * merge issues * Add partitionsettigns bean to match master * Trying to debug testcontainers on azure... * Trying to debug testcontainers on azure... * Run even on previous failure * Fix testcontainer port binds * Add full text logs attaching, and test reporting * I love yaml * Remove comment * Refactor delta remove to not do a once-over traversal and flatten all children pre-delete. 
Resolves transaction commit boundary issue * Modify TX beheaviour * wip * Rework to use config and mocked beans for partition-aware lastN * rework to not use beans and just inject vars i need for test, keps test envs cleaner * update azure pipeline to only copy test results * Remove erroneous publish * Always get test logs * Make it so indexing works, * Add todos * revert deleteByPid * Work on test fixes * Test fixes * Test fixes * Another test fix * Test fix * Work on tests * Test fixes * All tests passing locally * Fix test failure * FIx build error * Rename usages of HibernateDialectProvider -> HibernatePropertiesProvider * Update ngram of token filters * Tidying * subvert the purpose of hibernate elastic props builder to inject template * dead space * Rename changed class Co-authored-by: jamesagnew --- azure-pipelines.yml | 21 + .../fhir/rest/gclient/StringClientParam.java | 1 + hapi-fhir-cli/hapi-fhir-cli-api/pom.xml | 7 +- .../ca/uhn/fhir/jpa/demo/CommonConfig.java | 19 +- .../5_3_0/2190-hibernate-search-6.yaml | 5 + .../hapi/fhir/changelog/5_3_0/changes.yaml | 6 +- .../ca/uhn/hapi/fhir/docs/server_jpa/lastn.md | 2 +- hapi-fhir-elasticsearch-6/pom.xml | 144 ---- hapi-fhir-jpaserver-api/pom.xml | 8 +- hapi-fhir-jpaserver-base/pom.xml | 77 +- .../jpa/bulk/job/ResourceToFileWriter.java | 5 +- .../ca/uhn/fhir/jpa/config/BaseConfig.java | 4 +- ....java => HibernatePropertiesProvider.java} | 14 +- .../ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java | 14 +- .../ca/uhn/fhir/jpa/dao/BaseStorageDao.java | 2 +- .../fhir/jpa/dao/FulltextSearchSvcImpl.java | 358 ++------- .../uhn/fhir/jpa/dao/IFulltextSearchSvc.java | 4 +- .../uhn/fhir/jpa/dao/IHapiJpaRepository.java | 29 - .../dao/data/ITermCodeSystemVersionDao.java | 4 - .../fhir/jpa/dao/data/ITermConceptDao.java | 10 +- .../dao/data/ITermConceptDesignationDao.java | 10 +- .../data/ITermConceptParentChildLinkDao.java | 14 +- .../jpa/dao/data/ITermConceptPropertyDao.java | 10 +- .../dao/expunge/ExpungeEverythingService.java | 3 +- .../dao/predicate/PredicateBuilderCoords.java | 23 +- .../java/ca/uhn/fhir/jpa/entity/MdmLink.java | 7 +- .../jpa/entity/TermCodeSystemVersion.java | 7 +- .../ca/uhn/fhir/jpa/entity/TermConcept.java | 51 +- .../jpa/entity/TermConceptDesignation.java | 3 + .../entity/TermConceptParentChildLink.java | 5 +- .../fhir/jpa/entity/TermConceptProperty.java | 4 + .../jpa/entity/TermConceptPropertyBinder.java | 77 ++ .../TermConceptPropertyFieldBridge.java | 70 -- .../jpa/provider/JpaSystemProviderDstu2.java | 34 - .../dstu3/JpaSystemProviderDstu3.java | 31 - .../jpa/provider/r4/JpaSystemProviderR4.java | 33 - .../jpa/provider/r5/JpaSystemProviderR5.java | 33 - .../DeferConceptIndexingInterceptor.java | 55 -- .../DeferConceptIndexingRoutingBinder.java | 53 ++ .../search/HapiLuceneAnalysisConfigurer.java | 91 +++ .../search/LuceneSearchMappingFactory.java | 78 -- .../jpa/search/builder/SearchBuilder.java | 4 +- .../predicate/CoordsPredicateBuilder.java | 22 +- .../builder/sql/SearchQueryBuilder.java | 4 +- ...asticsearchHibernatePropertiesBuilder.java | 108 ++- .../elastic/ElasticsearchMappingProvider.java | 69 -- .../HapiElasticsearchAnalysisConfigurer.java | 87 +++ .../lastn/ElasticsearchRestClientFactory.java | 28 +- .../search/lastn/ElasticsearchSvcImpl.java | 84 ++- .../reindex/ResourceReindexingSvcImpl.java | 28 +- .../fhir/jpa/term/BaseTermReadSvcImpl.java | 703 ++++++++++-------- .../term/TermCodeSystemStorageSvcImpl.java | 335 +++++---- .../jpa/term/TermDeferredStorageSvcImpl.java | 197 ++++- 
...ansionComponentWithConceptAccumulator.java | 2 +- .../jpa/term/api/ITermDeferredStorageSvc.java | 4 + .../CircularQueueCaptureQueriesListener.java | 17 +- .../ca/uhn/fhir/jpa/util/CoordCalculator.java | 30 +- .../java/ca/uhn/fhir/jpa/util/SearchBox.java | 45 -- .../java/ca/uhn/fhir/jpa/util/TestUtil.java | 4 +- .../lastn/ObservationCodeIndexSchema.json | 46 +- .../search/lastn/ObservationIndexSchema.json | 2 - .../uhn/fhir/jpa/config/TestDstu2Config.java | 16 +- .../uhn/fhir/jpa/config/TestDstu3Config.java | 17 +- .../ca/uhn/fhir/jpa/config/TestR4Config.java | 13 +- .../config/TestR4ConfigWithElasticSearch.java | 59 +- .../TestR4ConfigWithElasticsearchClient.java | 12 +- .../TestR4WithLuceneDisabledConfig.java | 8 +- .../ca/uhn/fhir/jpa/config/TestR5Config.java | 12 +- .../java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java | 84 +++ .../fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java | 12 +- .../FhirResourceDaoDstu2SearchFtTest.java | 109 --- .../jpa/dao/dstu2/FhirSystemDaoDstu2Test.java | 1 - .../fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java | 15 +- .../FhirResourceDaoDstu3CodeSystemTest.java | 14 + .../FhirResourceDaoDstu3SearchFtTest.java | 111 --- .../predicate/PredicateBuilderCoordsTest.java | 20 +- .../ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java | 84 ++- .../r4/FhirResourceDaoR4CodeSystemTest.java | 4 +- .../dao/r4/FhirResourceDaoR4DeleteTest.java | 1 - .../dao/r4/FhirResourceDaoR4SearchFtTest.java | 126 ---- .../r4/FhirResourceDaoR4SearchLastNIT.java | 1 - .../jpa/dao/r4/FhirResourceDaoR4Test.java | 41 +- .../ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java | 55 +- .../r5/FhirResourceDaoR5CodeSystemTest.java | 4 +- .../jpa/provider/SystemProviderDstu2Test.java | 93 --- .../ResourceProviderDstu3CodeSystemTest.java | 7 +- .../ResourceProviderDstu3ValueSetTest.java | 15 + ...rceProviderDstu3ValueSetVersionedTest.java | 126 +--- .../r4/BaseResourceProviderR4Test.java | 4 + ...sentInterceptorResourceProviderR4Test.java | 15 +- .../r4/ResourceProviderR4CodeSystemTest.java | 2 + ...urceProviderR4CodeSystemVersionedTest.java | 2 + ...rceProviderR4ValueSetNoVerCSNoVerTest.java | 59 +- ...ourceProviderR4ValueSetVerCSNoVerTest.java | 55 +- ...esourceProviderR4ValueSetVerCSVerTest.java | 145 +--- .../jpa/provider/r4/SystemProviderR4Test.java | 98 --- .../r5/ResourceProviderR5ValueSetTest.java | 137 +--- ...sourceProviderR5ValueSetVersionedTest.java | 120 +-- .../sql/SearchQueryBuilderMySqlTest.java | 8 +- .../builder/sql/SearchQueryBuilderTest.java | 31 +- ...lasticsearchSvcMultipleObservationsIT.java | 24 +- ...tNElasticsearchSvcSingleObservationIT.java | 26 +- .../lastn/config/TestElasticsearchConfig.java | 62 -- .../TestElasticsearchContainerHelper.java | 20 + .../subscription/SubscriptionTestUtil.java | 5 + .../email/EmailSubscriptionDstu2Test.java | 8 +- .../TerminologyLoaderSvcLoincJpaTest.java | 6 + .../jpa/term/TerminologySvcDeltaR4Test.java | 15 +- .../jpa/term/TerminologySvcImplDstu3Test.java | 11 +- .../jpa/term/TerminologySvcImplR4Test.java | 3 +- .../jpa/term/ValueSetExpansionR4Test.java | 703 +++--------------- .../fhir/jpa/util/CoordCalculatorTest.java | 40 +- .../uhn/fhir/jpa/util/JpaClasspathTest.java | 5 +- .../src/test/resources/logback-test.xml | 22 +- hapi-fhir-jpaserver-batch/pom.xml | 2 +- hapi-fhir-jpaserver-mdm/pom.xml | 2 +- .../uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java | 2 +- hapi-fhir-jpaserver-model/pom.xml | 8 +- .../model/cross/IBasePersistedResource.java | 4 +- .../jpa/model/entity/BaseHasResource.java | 5 +- .../BaseResourceIndexedSearchParam.java | 13 +- 
.../ResourceIndexedSearchParamCoords.java | 6 +- .../ResourceIndexedSearchParamDate.java | 8 +- .../ResourceIndexedSearchParamNumber.java | 10 +- .../ResourceIndexedSearchParamQuantity.java | 17 +- .../ResourceIndexedSearchParamString.java | 23 +- .../ResourceIndexedSearchParamToken.java | 8 +- .../entity/ResourceIndexedSearchParamUri.java | 4 +- .../fhir/jpa/model/entity/ResourceLink.java | 17 +- .../fhir/jpa/model/entity/ResourceTable.java | 65 +- .../search/IndexNonDeletedInterceptor.java | 66 -- .../search/ResourceTableRoutingBinder.java | 53 ++ .../util/BigDecimalNumericFieldBridge.java | 62 -- .../uhn/fhir/jpa/model/util/JpaConstants.java | 4 - hapi-fhir-jpaserver-searchparam/pom.xml | 4 +- .../extractor/BaseSearchParamExtractor.java | 8 +- .../extractor/GeopointNormalizer.java | 82 ++ .../uhn/fhir/jpa/config/TestJpaR4Config.java | 15 +- .../uhn/fhirtest/config/TestDstu2Config.java | 17 +- .../uhn/fhirtest/config/TestDstu3Config.java | 17 +- .../ca/uhn/fhirtest/config/TestR4Config.java | 16 +- .../ca/uhn/fhirtest/config/TestR5Config.java | 17 +- hapi-fhir-oauth2/pom.xml | 2 +- pom.xml | 88 ++- 144 files changed, 2708 insertions(+), 3868 deletions(-) create mode 100644 hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/2190-hibernate-search-6.yaml delete mode 100644 hapi-fhir-elasticsearch-6/pom.xml rename hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/{HibernateDialectProvider.java => HibernatePropertiesProvider.java} (70%) delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IHapiJpaRepository.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingInterceptor.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiLuceneAnalysisConfigurer.java delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/LuceneSearchMappingFactory.java delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchMappingProvider.java create mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/HapiElasticsearchAnalysisConfigurer.java delete mode 100644 hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchBox.java delete mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchConfig.java create mode 100644 hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java delete mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/IndexNonDeletedInterceptor.java create mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java delete mode 100644 hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/BigDecimalNumericFieldBridge.java create mode 100644 hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java diff --git a/azure-pipelines.yml b/azure-pipelines.yml index 5aecb406cba..b026fee6b54 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -17,6 +17,11 @@ jobs: 
timeoutInMinutes: 360 container: maven:3-jdk-11 steps: + - task: DockerInstaller@0 + displayName: Docker Installer + inputs: + dockerVersion: 17.09.0-ce + releaseType: stable - task: Cache@2 inputs: key: 'maven | "$(Agent.OS)" | ./pom.xml' @@ -35,8 +40,24 @@ jobs: # These are JVM options (and don't show up in the build logs) mavenOptions: '-Xmx1024m $(MAVEN_OPTS) -Dorg.slf4j.simpleLogger.showDateTime=true -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss,SSS -Duser.timezone=America/Toronto' jdkVersionOption: 1.11 + - task: CopyFiles@2 + condition: always() + inputs: + sourceFolder: '$(System.DefaultWorkingDirectory)/' + contents: '**/target/*output.txt' + targetFolder: '$(Build.ArtifactStagingDirectory)' + - task: PublishPipelineArtifact@1 + displayName: 'Publish Full Test Output' + condition: always() + inputs: + targetPath: '$(Build.ArtifactStagingDirectory)/' + artifactName: 'full_logs.zip' - script: bash <(curl https://codecov.io/bash) -t $(CODECOV_TOKEN) displayName: 'codecov' + - task: PublishTestResults@2 + inputs: + testResultsFormat: 'JUnit' + testResultsFiles: '**/TEST-*.xml' - task: PublishCodeCoverageResults@1 inputs: codeCoverageTool: 'JaCoCo' diff --git a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java index e999f98aa40..fc636ab4804 100644 --- a/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java +++ b/hapi-fhir-base/src/main/java/ca/uhn/fhir/rest/gclient/StringClientParam.java @@ -144,6 +144,7 @@ public class StringClientParam extends BaseClientParam implements IParam { return new StringCriterion<>(getParamName(), theValue); } + @Override public ICriterion value(IPrimitiveType theValue) { return new StringCriterion<>(getParamName(), theValue.getValue()); diff --git a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml index a23b30eccf9..1d373e7221d 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml +++ b/hapi-fhir-cli/hapi-fhir-cli-api/pom.xml @@ -254,8 +254,13 @@ awaitility test + + org.rauschig + jarchivelib + test + - + diff --git a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java index 2d2485b80ca..b0668955161 100644 --- a/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java +++ b/hapi-fhir-cli/hapi-fhir-cli-jpaserver/src/main/java/ca/uhn/fhir/jpa/demo/CommonConfig.java @@ -23,10 +23,14 @@ package ca.uhn.fhir.jpa.demo; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.lang3.time.DateUtils; import org.hibernate.dialect.H2Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -81,6 +85,8 @@ public class CommonConfig { @Bean public Properties jpaProperties() { Properties extraProperties = new Properties(); + + //Regular 
Hibernate Settings extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); extraProperties.put("hibernate.format_sql", "true"); extraProperties.put("hibernate.show_sql", "false"); @@ -90,14 +96,13 @@ public class CommonConfig { extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_structured_entries", "false"); extraProperties.put("hibernate.cache.use_minimal_puts", "false"); - extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "filesystem"); - extraProperties.put("hibernate.search.default.indexBase", "target/lucenefiles"); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); - extraProperties.put("hibernate.search.default.worker.execution", "async"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), "target/lucenefiles"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); if (System.getProperty("lowmem") != null) { - extraProperties.put("hibernate.search.autoregister_listeners", "false"); + extraProperties.put(HibernateOrmMapperSettings.ENABLED, "false"); } return extraProperties; diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/2190-hibernate-search-6.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/2190-hibernate-search-6.yaml new file mode 100644 index 00000000000..88f8e3c1171 --- /dev/null +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/2190-hibernate-search-6.yaml @@ -0,0 +1,5 @@ +--- +type: change +issue: 2190 +title: "Updates to Hibernate Search require a full reindexing of all indexed fulltext data, which is held in Lucene or Elasticsearch. + Users using elasticsearch for fulltext indexing must upgrade to Elasticsearch 7.10.0." diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/changes.yaml b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/changes.yaml index b2dce45eaa9..24259fb2ed8 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/changes.yaml +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/changelog/5_3_0/changes.yaml @@ -6,9 +6,13 @@
  • SLF4j (All Modules): 1.7.28 -> 1.7.30
  • Woodstox (XML FHIR Parser): 4.4.1 -> 6.2.3 (Note that the Maven groupId has changed from org.codehaus.woodstox to com.fasterxml.woodstox and the Maven artifactId has changed from woodstox-core-asl to woodstox-core for this library)
-  • Hibernate ORM (JPA): 5.4.22 -> 5.4.26
  • Spring (JPA): 5.2.3.RELEASE -> 5.2.9.RELEASE
  • Datasource-Proxy (JPA): 1.5.1 -> 1.7
  • Jetty (JPA Starter): 9.4.30.v20200611 -> 9.4.35.v20201120
+  • Hibernate ORM (JPA Server): 5.4.22.FINAL -> 5.4.26.FINAL
+  • Spring (JPA Server): 5.2.9.RELEASE -> 5.3.2
+  • Spring Data (JPA Server): 2.2.0.RELEASE -> 2.4.2
+  • Hibernate Search (JPA Server): 5.11.5.FINAL -> 6.0.0.Final
+  • Lucene(HAPI FHIR JPA Server): 5.5.5 -> 8.7.0
  • Spring Boot (JPA Starter): 2.2.6.RELEASE -> 2.4.1
" diff --git a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/lastn.md b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/lastn.md index 7f526541678..4f70dac2307 100644 --- a/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/lastn.md +++ b/hapi-fhir-docs/src/main/resources/ca/uhn/hapi/fhir/docs/server_jpa/lastn.md @@ -16,7 +16,7 @@ As described in the [FHIR specification](http://hl7.org/fhir/observation-operati # Limitations -Currently only Elasticsearch versions up to 6.5.4 are supported. +Currently only Elasticsearch version 7.10.0 is officially supported. Search parameters other than those listed above are currently not supported. diff --git a/hapi-fhir-elasticsearch-6/pom.xml b/hapi-fhir-elasticsearch-6/pom.xml deleted file mode 100644 index d0c97094ec8..00000000000 --- a/hapi-fhir-elasticsearch-6/pom.xml +++ /dev/null @@ -1,144 +0,0 @@ - - - - 4.0.0 - - - ca.uhn.hapi.fhir - hapi-deployable-pom - 5.3.0-SNAPSHOT - ../hapi-deployable-pom/pom.xml - - - hapi-fhir-elasticsearch-6 - - hapi-fhir-elasticsearch-6 - - - UTF-8 - 1.7 - 1.7 - - - - - junit - junit - 4.12 - test - - - - org.elasticsearch.client - elasticsearch-rest-high-level-client - 6.5.4 - - - - com.fasterxml.jackson.core - jackson-core - - - com.fasterxml.jackson.dataformat - * - - - com.github.spullara.mustache.java - compiler - - - com.tdunning - t-digest - - - commons-codec - commons-codec - - - commons-logging - commons-logging - - - net.bytebuddy - byte-buddy - - - net.sf.jopt-simple - jopt-simple - - - org.apache.httpcomponents - * - - - org.apache.lucene - lucene-analyzers-common - - - org.apache.lucene - lucene-backward-codecs - - - org.apache.lucene - lucene-sandbox - - - org.elasticsearch - jna - - - org.hdrhistogram - HdrHistogram - - - org.yaml - snakeyaml - - - - - - - - - maven-shade-plugin - 3.2.1 - - - package - - shade - - - true - shaded6 - - - com.carrotsearch.hppc - com.shadehapi.carrotsearch.hppc - - - org.apache.logging.log4j - org.shadehapi.apache.logging.log4j - - - org.apache.lucene - org.shadehapi.apache.lucene - - - org.elasticsearch - org.shadehapi.elasticsearch - - - org.joda - org.shadehapi.joda - - - - - - - - - diff --git a/hapi-fhir-jpaserver-api/pom.xml b/hapi-fhir-jpaserver-api/pom.xml index cfc4eba286e..843ebfd672a 100644 --- a/hapi-fhir-jpaserver-api/pom.xml +++ b/hapi-fhir-jpaserver-api/pom.xml @@ -89,8 +89,12 @@ - org.hibernate - hibernate-search-orm + org.hibernate.search + hibernate-search-mapper-orm + + + org.hibernate.search + hibernate-search-backend-elasticsearch diff --git a/hapi-fhir-jpaserver-base/pom.xml b/hapi-fhir-jpaserver-base/pom.xml index 9abe1ce49b6..4322697f967 100644 --- a/hapi-fhir-jpaserver-base/pom.xml +++ b/hapi-fhir-jpaserver-base/pom.xml @@ -150,18 +150,6 @@ hapi-fhir-jpaserver-batch ${project.version} - - ca.uhn.hapi.fhir - hapi-fhir-elasticsearch-6 - ${project.version} - shaded6 - - - org.apache.logging.log4j - log4j-api - - - net.ttddyy @@ -478,30 +466,48 @@ javax.el + + + org.apache.logging.log4j + log4j-to-slf4j + - org.hibernate - hibernate-search-orm + org.hibernate.search + hibernate-search-mapper-orm + + + org.apache.logging.log4j + log4j-api + + - org.apache.lucene - lucene-highlighter + org.elasticsearch.client + elasticsearch-rest-high-level-client + + + org.apache.logging.log4j + log4j-api + + + + + org.hibernate.search + hibernate-search-backend-elasticsearch + + + org.hibernate.search + hibernate-search-backend-lucene org.apache.lucene lucene-analyzers-phonetic - org.hibernate - 
hibernate-search-elasticsearch - - - commons-logging - commons-logging - - + org.apache.lucene + lucene-backward-codecs - com.google.guava @@ -579,9 +585,24 @@ test - pl.allegro.tech - embedded-elasticsearch - 2.10.0 + org.hamcrest + hamcrest + test + + + org.testcontainers + testcontainers + test + + + org.testcontainers + elasticsearch + test + + + org.testcontainers + junit-jupiter + test org.hl7.fhir.testcases diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java index 0e5b88ffab0..74468627856 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/bulk/job/ResourceToFileWriter.java @@ -23,6 +23,7 @@ package ca.uhn.fhir.jpa.bulk.job; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.jpa.api.dao.DaoRegistry; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; +import ca.uhn.fhir.jpa.api.model.DaoMethodOutcome; import ca.uhn.fhir.jpa.batch.log.Logs; import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; import ca.uhn.fhir.jpa.entity.BulkExportCollectionFileEntity; @@ -100,10 +101,10 @@ public class ResourceToFileWriter implements ItemWriter> { binary.setContentType(Constants.CT_FHIR_NDJSON); binary.setContent(myOutputStream.toByteArray()); - return myBinaryDao.create(binary).getResource().getIdElement(); + DaoMethodOutcome outcome = myBinaryDao.create(binary); + return outcome.getResource().getIdElement(); } - @SuppressWarnings("unchecked") private IFhirResourceDao getBinaryDao() { return myDaoRegistry.getResourceDao("Binary"); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java index dbce1b288ea..bfe36f6d018 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/BaseConfig.java @@ -473,8 +473,8 @@ public abstract class BaseConfig { } @Bean - public HibernateDialectProvider hibernateDialectProvider() { - return new HibernateDialectProvider(); + public HibernatePropertiesProvider HibernatePropertiesProvider() { + return new HibernatePropertiesProvider(); } @Bean diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernateDialectProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernatePropertiesProvider.java similarity index 70% rename from hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernateDialectProvider.java rename to hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernatePropertiesProvider.java index 8f04ea43c3e..788332cd7f3 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernateDialectProvider.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/config/HibernatePropertiesProvider.java @@ -21,17 +21,20 @@ package ca.uhn.fhir.jpa.config; */ import ca.uhn.fhir.util.ReflectionUtil; +import org.apache.commons.lang3.StringUtils; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.Validate; import org.hibernate.dialect.Dialect; +import org.hibernate.search.engine.cfg.BackendSettings; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; -public class HibernateDialectProvider { 
+public class HibernatePropertiesProvider { @Autowired private LocalContainerEntityManagerFactoryBean myEntityManagerFactory; private Dialect myDialect; + private String myHibernateSearchBackend; @VisibleForTesting public void setDialectForUnitTest(Dialect theDialect) { @@ -49,4 +52,13 @@ public class HibernateDialectProvider { return dialect; } + public String getHibernateSearchBackend(){ + String hibernateSearchBackend = myHibernateSearchBackend; + if (StringUtils.isBlank(hibernateSearchBackend)) { + hibernateSearchBackend = (String) myEntityManagerFactory.getJpaPropertyMap().get(BackendSettings.backendKey(BackendSettings.TYPE)); + Validate.notNull(hibernateSearchBackend, BackendSettings.backendKey(BackendSettings.TYPE) + " property is unset!"); + myHibernateSearchBackend = hibernateSearchBackend; + } + return myHibernateSearchBackend; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java index 5b2d5c359d2..48e3bb209a7 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseHapiFhirDao.java @@ -1031,8 +1031,8 @@ public abstract class BaseHapiFhirDao extends BaseStora entity.setDeleted(theDeletedTimestampOrNull); entity.setUpdated(theDeletedTimestampOrNull); - entity.setNarrativeTextParsedIntoWords(null); - entity.setContentTextParsedIntoWords(null); + entity.setNarrativeText(null); + entity.setContentText(null); entity.setHashSha256(null); entity.setIndexStatus(INDEX_STATUS_INDEXED); changed = populateResourceIntoEntity(theRequest, theResource, entity, true); @@ -1058,8 +1058,8 @@ public abstract class BaseHapiFhirDao extends BaseStora newParams.populateResourceTableSearchParamsPresentFlags(entity); entity.setIndexStatus(INDEX_STATUS_INDEXED); - populateFullTextFields(myContext, theResource, entity); } + populateFullTextFields(myContext, theResource, entity); } else { changed = populateResourceIntoEntity(theRequest, theResource, entity, false); @@ -1481,11 +1481,11 @@ public abstract class BaseHapiFhirDao extends BaseStora public static void populateFullTextFields(final FhirContext theContext, final IBaseResource theResource, ResourceTable theEntity) { if (theEntity.getDeleted() != null) { - theEntity.setNarrativeTextParsedIntoWords(null); - theEntity.setContentTextParsedIntoWords(null); + theEntity.setNarrativeText(null); + theEntity.setContentText(null); } else { - theEntity.setNarrativeTextParsedIntoWords(parseNarrativeTextIntoWords(theResource)); - theEntity.setContentTextParsedIntoWords(parseContentTextIntoWords(theContext, theResource)); + theEntity.setNarrativeText(parseNarrativeTextIntoWords(theResource)); + theEntity.setContentText(parseContentTextIntoWords(theContext, theResource)); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java index 760c0c90e8e..8208b935dee 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/BaseStorageDao.java @@ -148,7 +148,7 @@ public abstract class BaseStorageDao { } outcome.setId(id); - if (theEntity.isDeleted() == false) { + if (theEntity.getDeleted() == null) { outcome.setResource(theResource); } outcome.setEntity(theEntity); diff --git 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java index 9261ceb3b82..51e5f2dc559 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/FulltextSearchSvcImpl.java @@ -21,7 +21,6 @@ package ca.uhn.fhir.jpa.dao; */ import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.dao.data.IForcedIdDao; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.model.config.PartitionSettings; @@ -41,17 +40,21 @@ import com.google.common.collect.Sets; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.lucene.analysis.Analyzer; -import org.apache.lucene.search.Query; +import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.search.highlight.Formatter; import org.apache.lucene.search.highlight.Highlighter; -import org.apache.lucene.search.highlight.QueryScorer; import org.apache.lucene.search.highlight.Scorer; +import org.apache.lucene.search.highlight.TextFragment; import org.apache.lucene.search.highlight.TokenGroup; -import org.hibernate.search.jpa.FullTextEntityManager; -import org.hibernate.search.jpa.FullTextQuery; -import org.hibernate.search.query.dsl.BooleanJunction; -import org.hibernate.search.query.dsl.QueryBuilder; +import org.hibernate.search.backend.lucene.index.LuceneIndexManager; +import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep; +import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory; +import org.hibernate.search.engine.search.query.SearchQuery; +import org.hibernate.search.mapper.orm.Search; +import org.hibernate.search.mapper.orm.mapping.SearchMapping; +import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.instance.model.api.IAnyResource; +import org.jetbrains.annotations.NotNull; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.PlatformTransactionManager; import org.springframework.transaction.annotation.Transactional; @@ -60,12 +63,14 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContextType; +import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; +import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -74,15 +79,13 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { @PersistenceContext(type = PersistenceContextType.TRANSACTION) private EntityManager myEntityManager; + @Autowired private PlatformTransactionManager myTxManager; @Autowired protected IForcedIdDao myForcedIdDao; - @Autowired - private DaoConfig myDaoConfig; - @Autowired private IdHelperService myIdHelperService; @@ -95,136 +98,81 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { super(); } - private void addTextSearch(QueryBuilder theQueryBuilder, BooleanJunction theBoolean, List> theTerms, String theFieldName, String theFieldNameEdgeNGram, String theFieldNameNGram) { + private void addTextSearch(SearchPredicateFactory f, BooleanPredicateClausesStep 
b, List> theTerms, String theFieldName, String theFieldNameEdgeNGram, String theFieldNameTextNGram){ if (theTerms == null) { return; } for (List nextAnd : theTerms) { - Set terms = new HashSet<>(); - for (IQueryParameterType nextOr : nextAnd) { - StringParam nextOrString = (StringParam) nextOr; - String nextValueTrimmed = StringUtils.defaultString(nextOrString.getValue()).trim(); - if (isNotBlank(nextValueTrimmed)) { - terms.add(nextValueTrimmed); - } - } - if (terms.isEmpty() == false) { + Set terms = extractOrStringParams(nextAnd); if (terms.size() == 1) { - //@formatter:off - Query textQuery = theQueryBuilder - .phrase() - .withSlop(2) - .onField(theFieldName).boostedTo(4.0f) -// .andField(theFieldNameEdgeNGram).boostedTo(2.0f) -// .andField(theFieldNameNGram).boostedTo(1.0f) - .sentence(terms.iterator().next().toLowerCase()).createQuery(); - //@formatter:on - - theBoolean.must(textQuery); - } else { + b.must(f.phrase() + .field(theFieldName) + .boost(4.0f) + .matching(terms.iterator().next().toLowerCase()) + .slop(2)); + } else if (terms.size() > 1){ String joinedTerms = StringUtils.join(terms, ' '); - theBoolean.must(theQueryBuilder.keyword().onField(theFieldName).matching(joinedTerms).createQuery()); + b.must(f.match().field(theFieldName).matching(joinedTerms)); + } else { + ourLog.debug("No Terms found in query parameter {}", nextAnd); } } } + + @NotNull + private Set extractOrStringParams(List nextAnd) { + Set terms = new HashSet<>(); + for (IQueryParameterType nextOr : nextAnd) { + StringParam nextOrString = (StringParam) nextOr; + String nextValueTrimmed = StringUtils.defaultString(nextOrString.getValue()).trim(); + if (isNotBlank(nextValueTrimmed)) { + terms.add(nextValueTrimmed); + } + } + return terms; } private List doSearch(String theResourceName, SearchParameterMap theParams, ResourcePersistentId theReferencingPid) { - FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); - List pids = null; - - /* - * Handle textual params - */ - /* - for (String nextParamName : theParams.keySet()) { - for (List nextAndList : theParams.get(nextParamName)) { - for (Iterator orIterator = nextAndList.iterator(); orIterator.hasNext();) { - IQueryParameterType nextParam = orIterator.next(); - if (nextParam instanceof TokenParam) { - TokenParam nextTokenParam = (TokenParam) nextParam; - if (nextTokenParam.isText()) { - orIterator.remove(); - QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceIndexedSearchParamString.class).get(); - BooleanJunction bool = qb.bool(); - - bool.must(qb.keyword().onField("myParamName").matching(nextParamName).createQuery()); - if (isNotBlank(theResourceName)) { - bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery()); - } -// - //@formatter:off - String value = nextTokenParam.getValue().toLowerCase(); - bool.must(qb.keyword().onField("myValueTextEdgeNGram").matching(value).createQuery()); - - //@formatter:on - - FullTextQuery ftq = em.createFullTextQuery(bool.createQuery(), ResourceIndexedSearchParamString.class); - - List resultList = ftq.getResultList(); - pids = new ArrayList(); - for (Object next : resultList) { - ResourceIndexedSearchParamString nextAsArray = (ResourceIndexedSearchParamString) next; - pids.add(nextAsArray.getResourcePid()); - } - } - } - } - } - } - - if (pids != null && pids.isEmpty()) { - return pids; - } - */ - - QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get(); - BooleanJunction bool = 
qb.bool(); - - /* - * Handle _content parameter (resource body content) - */ + SearchSession session = Search.session(myEntityManager); List> contentAndTerms = theParams.remove(Constants.PARAM_CONTENT); - addTextSearch(qb, bool, contentAndTerms, "myContentText", "myContentTextEdgeNGram", "myContentTextNGram"); - - /* - * Handle _text parameter (resource narrative content) - */ List> textAndTerms = theParams.remove(Constants.PARAM_TEXT); - addTextSearch(qb, bool, textAndTerms, "myNarrativeText", "myNarrativeTextEdgeNGram", "myNarrativeTextNGram"); - if (theReferencingPid != null) { - bool.must(qb.keyword().onField("myResourceLinksField").matching(theReferencingPid.toString()).createQuery()); - } + List longPids = session.search(ResourceTable.class) + //Selects are replacements for projection and convert more cleanly than the old implementation. + .select( + f -> f.field("myId", Long.class) + ) + .where( + f -> f.bool(b -> { + /* + * Handle _content parameter (resource body content) + */ + addTextSearch(f, b, contentAndTerms, "myContentText", "mycontentTextEdgeNGram", "myContentTextNGram"); + /* + * Handle _text parameter (resource narrative content) + */ + addTextSearch(f, b, textAndTerms, "myNarrativeText", "myNarrativeTextEdgeNGram", "myNarrativeTextNGram"); - if (bool.isEmpty()) { - return pids; - } + if (theReferencingPid != null) { + b.must(f.match().field("myResourceLinksField").matching(theReferencingPid.toString())); + } - if (isNotBlank(theResourceName)) { - bool.must(qb.keyword().onField("myResourceType").matching(theResourceName).createQuery()); - } + //DROP EARLY HERE IF BOOL IS EMPTY? - Query luceneQuery = bool.createQuery(); + if (isNotBlank(theResourceName)) { + b.must(f.match().field("myResourceType").matching(theResourceName)); + } + }) + ).fetchAllHits(); - // wrap Lucene query in a javax.persistence.SqlQuery - FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, ResourceTable.class); - jpaQuery.setProjection("myId"); + return convertLongsToResourcePersistentIds(longPids); + } - // execute search - List result = jpaQuery.getResultList(); - - ArrayList retVal = new ArrayList<>(); - for (Object object : result) { - Object[] nextArray = (Object[]) object; - Long next = (Long) nextArray[0]; - if (next != null) { - retVal.add(new ResourcePersistentId(next)); - } - } - - return retVal; + private List convertLongsToResourcePersistentIds(List theLongPids) { + return theLongPids.stream() + .map(pid -> new ResourcePersistentId(pid)) + .collect(Collectors.toList()); } @Override @@ -259,8 +207,8 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { if (retVal == null) { retVal = new TransactionTemplate(myTxManager).execute(t -> { try { - FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); - em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get(); + SearchSession searchSession = Search.session(myEntityManager); + searchSession.search(ResourceTable.class); return Boolean.FALSE; } catch (Exception e) { ourLog.trace("FullText test failed", e); @@ -287,172 +235,4 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc { @Autowired private PartitionSettings myPartitionSettings; - @Transactional() - @Override - public List suggestKeywords(String theContext, String theSearchParam, String theText, RequestDetails theRequest) { - Validate.notBlank(theContext, "theContext must be provided"); - Validate.notBlank(theSearchParam, "theSearchParam must be provided"); - 
Validate.notBlank(theText, "theSearchParam must be provided"); - - long start = System.currentTimeMillis(); - - String[] contextParts = StringUtils.split(theContext, '/'); - if (contextParts.length != 3 || "Patient".equals(contextParts[0]) == false || "$everything".equals(contextParts[2]) == false) { - throw new InvalidRequestException("Invalid context: " + theContext); - } - - // Partitioning is not supported for this operation - Validate.isTrue(myPartitionSettings.isPartitioningEnabled() == false, "Suggest keywords not supported for partitioned system"); - RequestPartitionId requestPartitionId = null; - - ResourcePersistentId pid = myIdHelperService.resolveResourcePersistentIds(requestPartitionId, contextParts[0], contextParts[1]); - - FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); - - QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(ResourceTable.class).get(); - - Query textQuery = qb - .phrase() - .withSlop(2) - .onField("myContentText").boostedTo(4.0f) - .andField("myContentTextEdgeNGram").boostedTo(2.0f) - .andField("myContentTextNGram").boostedTo(1.0f) - .andField("myContentTextPhonetic").boostedTo(0.5f) - .sentence(theText.toLowerCase()).createQuery(); - - Query query = qb.bool() - .must(qb.keyword().onField("myResourceLinksField").matching(pid.toString()).createQuery()) - .must(textQuery) - .createQuery(); - - FullTextQuery ftq = em.createFullTextQuery(query, ResourceTable.class); - ftq.setProjection("myContentText"); - ftq.setMaxResults(20); - - List resultList = ftq.getResultList(); - List suggestions = Lists.newArrayList(); - for (Object next : resultList) { - Object[] nextAsArray = (Object[]) next; - String nextValue = (String) nextAsArray[0]; - - try { - MySuggestionFormatter formatter = new MySuggestionFormatter(theText, suggestions); - Scorer scorer = new QueryScorer(textQuery); - Highlighter highlighter = new Highlighter(formatter, scorer); - Analyzer analyzer = em.getSearchFactory().getAnalyzer(ResourceTable.class); - - formatter.setAnalyzer("myContentTextPhonetic"); - highlighter.getBestFragments(analyzer.tokenStream("myContentTextPhonetic", nextValue), nextValue, 10); - - formatter.setAnalyzer("myContentTextNGram"); - highlighter.getBestFragments(analyzer.tokenStream("myContentTextNGram", nextValue), nextValue, 10); - - formatter.setFindPhrasesWith(); - formatter.setAnalyzer("myContentTextEdgeNGram"); - highlighter.getBestFragments(analyzer.tokenStream("myContentTextEdgeNGram", nextValue), nextValue, 10); - - } catch (Exception e) { - throw new InternalErrorException(e); - } - - } - - Collections.sort(suggestions); - - Set terms = Sets.newHashSet(); - for (Iterator iter = suggestions.iterator(); iter.hasNext(); ) { - String nextTerm = iter.next().getTerm().toLowerCase(); - if (!terms.add(nextTerm)) { - iter.remove(); - } - } - - long delay = System.currentTimeMillis() - start; - ourLog.info("Provided {} suggestions for term {} in {} ms", terms.size(), theText, delay); - - return suggestions; - } - - public class MySuggestionFormatter implements Formatter { - - private List mySuggestions; - private String myAnalyzer; - private ArrayList myPartialMatchPhrases; - private ArrayList myPartialMatchScores; - private String myOriginalSearch; - - MySuggestionFormatter(String theOriginalSearch, List theSuggestions) { - myOriginalSearch = theOriginalSearch; - mySuggestions = theSuggestions; - } - - @Override - public String highlightTerm(String theOriginalText, TokenGroup theTokenGroup) { - 
ourLog.debug("{} Found {} with score {}", myAnalyzer, theOriginalText, theTokenGroup.getTotalScore()); - if (theTokenGroup.getTotalScore() > 0) { - float score = theTokenGroup.getTotalScore(); - if (theOriginalText.equalsIgnoreCase(myOriginalSearch)) { - score = score + 1.0f; - } - mySuggestions.add(new Suggestion(theOriginalText, score)); - } else if (myPartialMatchPhrases != null) { - if (theOriginalText.length() < 100) { - for (int i = 0; i < myPartialMatchPhrases.size(); i++) { - if (theOriginalText.contains(myPartialMatchPhrases.get(i))) { - mySuggestions.add(new Suggestion(theOriginalText, myPartialMatchScores.get(i) - 0.5f)); - } - } - } - } - - return null; - } - - void setAnalyzer(String theString) { - myAnalyzer = theString; - } - - void setFindPhrasesWith() { - myPartialMatchPhrases = new ArrayList<>(); - myPartialMatchScores = new ArrayList<>(); - - for (Suggestion next : mySuggestions) { - myPartialMatchPhrases.add(' ' + next.myTerm); - myPartialMatchScores.add(next.myScore); - } - - myPartialMatchPhrases.add(myOriginalSearch); - myPartialMatchScores.add(1.0f); - } - - } - - public static class Suggestion implements Comparable { - private String myTerm; - private float myScore; - - Suggestion(String theTerm, float theScore) { - myTerm = theTerm; - myScore = theScore; - } - - @Override - public int compareTo(Suggestion theO) { - return Float.compare(theO.myScore, myScore); - } - - public float getScore() { - return myScore; - } - - public String getTerm() { - return myTerm; - } - - @Override - public String toString() { - return "Suggestion[myTerm=" + myTerm + ", myScore=" + myScore + "]"; - } - } - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java index 6efbda49b2b..ae3e435afe1 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IFulltextSearchSvc.java @@ -22,15 +22,13 @@ package ca.uhn.fhir.jpa.dao; import java.util.List; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.server.RequestDetails; public interface IFulltextSearchSvc { - List suggestKeywords(String theContext, String theSearchParam, String theText, RequestDetails theRequest); - + List search(String theResourceName, SearchParameterMap theParams); List everything(String theResourceName, SearchParameterMap theParams, RequestDetails theRequest); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IHapiJpaRepository.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IHapiJpaRepository.java deleted file mode 100644 index facf8e1d11e..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/IHapiJpaRepository.java +++ /dev/null @@ -1,29 +0,0 @@ -package ca.uhn.fhir.jpa.dao; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.springframework.data.jpa.repository.JpaRepository; - -public interface IHapiJpaRepository extends JpaRepository { - - void deleteByPid(Long theId); - -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java index 0a4d738bd02..e61782218f0 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermCodeSystemVersionDao.java @@ -35,10 +35,6 @@ public interface ITermCodeSystemVersionDao extends JpaRepository findByCodeSystemPid(@Param("codesystem_pid") Long theCodeSystemPid); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java index 8be37505603..4574186808d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDao.java @@ -1,12 +1,11 @@ package ca.uhn.fhir.jpa.dao.data; -import ca.uhn.fhir.jpa.dao.IHapiJpaRepository; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import org.springframework.data.domain.Page; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; -import org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; @@ -33,7 +32,7 @@ import java.util.Optional; * #L% */ -public interface ITermConceptDao extends IHapiJpaRepository { +public interface ITermConceptDao extends JpaRepository { @Query("SELECT COUNT(t) FROM TermConcept t WHERE t.myCodeSystem.myId = :cs_pid") Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid); @@ -50,9 +49,4 @@ public interface ITermConceptDao extends IHapiJpaRepository { @Query("SELECT t FROM TermConcept t WHERE t.myIndexStatus = null") Page findResourcesRequiringReindexing(Pageable thePageRequest); - @Override - @Modifying - @Query("DELETE FROM TermConcept t WHERE t.myId = :pid") - void deleteByPid(@Param("pid") Long theId); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java index 0d450aa4cd1..c787c9c808e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptDesignationDao.java @@ -1,10 +1,9 @@ package ca.uhn.fhir.jpa.dao.data; -import ca.uhn.fhir.jpa.dao.IHapiJpaRepository; import ca.uhn.fhir.jpa.entity.TermConceptDesignation; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; -import 
org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; @@ -28,7 +27,7 @@ import org.springframework.data.repository.query.Param; * #L% */ -public interface ITermConceptDesignationDao extends IHapiJpaRepository { +public interface ITermConceptDesignationDao extends JpaRepository { @Query("SELECT t.myId FROM TermConceptDesignation t WHERE t.myCodeSystemVersion.myId = :csv_pid") Slice findIdsByCodeSystemVersion(Pageable thePage, @Param("csv_pid") Long thePid); @@ -36,9 +35,4 @@ public interface ITermConceptDesignationDao extends IHapiJpaRepository { +public interface ITermConceptParentChildLinkDao extends JpaRepository { @Query("SELECT COUNT(t) FROM TermConceptParentChildLink t WHERE t.myCodeSystem.myId = :cs_pid") Integer countByCodeSystemVersion(@Param("cs_pid") Long thePid); @@ -41,13 +40,4 @@ public interface ITermConceptParentChildLinkDao extends IHapiJpaRepository findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid); - @Modifying - @Query("DELETE FROM TermConceptParentChildLink t WHERE t.myChildPid = :pid OR t.myParentPid = :pid") - void deleteByConceptPid(@Param("pid") Long theId); - - @Override - @Modifying - @Query("DELETE FROM TermConceptParentChildLink t WHERE t.myPid = :pid") - void deleteByPid(@Param("pid") Long theId); - } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptPropertyDao.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptPropertyDao.java index 1ceef14085d..0dd1bb141de 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptPropertyDao.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/data/ITermConceptPropertyDao.java @@ -1,10 +1,9 @@ package ca.uhn.fhir.jpa.dao.data; -import ca.uhn.fhir.jpa.dao.IHapiJpaRepository; import ca.uhn.fhir.jpa.entity.TermConceptProperty; import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; -import org.springframework.data.jpa.repository.Modifying; +import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.data.jpa.repository.Query; import org.springframework.data.repository.query.Param; @@ -28,7 +27,7 @@ import org.springframework.data.repository.query.Param; * #L% */ -public interface ITermConceptPropertyDao extends IHapiJpaRepository { +public interface ITermConceptPropertyDao extends JpaRepository { @Query("SELECT t.myId FROM TermConceptProperty t WHERE t.myCodeSystemVersion.myId = :cs_pid") Slice findIdsByCodeSystemVersion(Pageable thePage, @Param("cs_pid") Long thePid); @@ -36,9 +35,4 @@ public interface ITermConceptPropertyDao extends IHapiJpaRepository { - counter.addAndGet(doExpungeEverythingQuery("DELETE from " + org.hibernate.search.jpa.Search.class.getSimpleName() + " d")); + counter.addAndGet(doExpungeEverythingQuery("DELETE from " + Search.class.getSimpleName() + " d")); return null; }); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoords.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoords.java index 0e7d1bab9eb..3e996a93c22 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoords.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoords.java @@ -24,15 
+24,14 @@ import ca.uhn.fhir.context.RuntimeSearchParam; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.dao.LegacySearchBuilder; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamCoords; -import ca.uhn.fhir.jpa.model.entity.ResourceLink; import ca.uhn.fhir.jpa.util.CoordCalculator; -import ca.uhn.fhir.jpa.util.SearchBox; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.dstu2.resource.Location; import ca.uhn.fhir.rest.param.QuantityParam; import ca.uhn.fhir.rest.param.SpecialParam; import ca.uhn.fhir.rest.param.TokenParam; import com.google.common.annotations.VisibleForTesting; +import org.hibernate.search.engine.spatial.GeoBoundingBox; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Scope; @@ -116,7 +115,7 @@ public class PredicateBuilderCoords extends BasePredicateBuilder implements IPre double latitudeDegrees = Double.parseDouble(latitudeValue); double longitudeDegrees = Double.parseDouble(longitudeValue); - SearchBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm); + GeoBoundingBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm); latitudePredicate = latitudePredicateFromBox(theBuilder, theFrom, box); longitudePredicate = longitudePredicateFromBox(theBuilder, theFrom, box); } @@ -124,24 +123,24 @@ public class PredicateBuilderCoords extends BasePredicateBuilder implements IPre return combineParamIndexPredicateWithParamNamePredicate(theResourceName, theSearchParam.getName(), theFrom, singleCode, theRequestPartitionId); } - private Predicate latitudePredicateFromBox(CriteriaBuilder theBuilder, From theFrom, SearchBox theBox) { + private Predicate latitudePredicateFromBox(CriteriaBuilder theBuilder, From theFrom, GeoBoundingBox theBox) { return theBuilder.and( - theBuilder.greaterThanOrEqualTo(theFrom.get("myLatitude"), theBox.getSouthWest().getLatitude()), - theBuilder.lessThanOrEqualTo(theFrom.get("myLatitude"), theBox.getNorthEast().getLatitude()) + theBuilder.greaterThanOrEqualTo(theFrom.get("myLatitude"), theBox.bottomRight().latitude()), + theBuilder.lessThanOrEqualTo(theFrom.get("myLatitude"), theBox.topLeft().latitude()) ); } @VisibleForTesting - Predicate longitudePredicateFromBox(CriteriaBuilder theBuilder, From theFrom, SearchBox theBox) { - if (theBox.crossesAntiMeridian()) { + Predicate longitudePredicateFromBox(CriteriaBuilder theBuilder, From theFrom, GeoBoundingBox theBox) { + if (theBox.bottomRight().longitude() < theBox.topLeft().longitude()) { return theBuilder.or( - theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.getNorthEast().getLongitude()), - theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.getSouthWest().getLongitude()) + theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.bottomRight().longitude()), + theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.topLeft().longitude()) ); } return theBuilder.and( - theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.getSouthWest().getLongitude()), - theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.getNorthEast().getLongitude()) + theBuilder.greaterThanOrEqualTo(theFrom.get("myLongitude"), theBox.topLeft().longitude()), + theBuilder.lessThanOrEqualTo(theFrom.get("myLongitude"), theBox.bottomRight().longitude()) ); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java 
b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java index f52c6bd6119..cd8cbff422d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/MdmLink.java @@ -285,7 +285,12 @@ public class MdmLink { return myEidMatch; } - public boolean isEidMatch() { + /** + * Note that this method can not be called getEidMatch or + * isEidMatch because Hibernate Search complains about having + * 2 accessors for this property + */ + public boolean isEidMatchPresent() { return myEidMatch != null && myEidMatch; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java index 7b56d032cda..81ca27ab680 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermCodeSystemVersion.java @@ -40,6 +40,7 @@ import javax.persistence.OneToMany; import javax.persistence.OneToOne; import javax.persistence.SequenceGenerator; import javax.persistence.Table; +import javax.persistence.Transient; import javax.persistence.UniqueConstraint; import java.io.Serializable; import java.util.ArrayList; @@ -57,6 +58,7 @@ public class TermCodeSystemVersion implements Serializable { public static final String IDX_CODESYSTEM_AND_VER = "IDX_CODESYSTEM_AND_VER"; public static final int MAX_VERSION_LENGTH = 200; private static final long serialVersionUID = 1L; + @OneToMany(fetch = FetchType.LAZY, mappedBy = "myCodeSystem") private Collection myConcepts; @@ -73,7 +75,7 @@ public class TermCodeSystemVersion implements Serializable { @Column(name = "RES_ID", nullable = false, insertable = false, updatable = false) private Long myResourcePid; - @Column(name = "CS_VERSION_ID", nullable = true, updatable = false, length = MAX_VERSION_LENGTH) + @Column(name = "CS_VERSION_ID", nullable = true, updatable = true, length = MAX_VERSION_LENGTH) private String myCodeSystemVersionId; /** @@ -91,7 +93,7 @@ public class TermCodeSystemVersion implements Serializable { @OneToOne(mappedBy = "myCurrentVersion", optional = true, fetch = FetchType.LAZY) private TermCodeSystem myCodeSystemHavingThisVersionAsCurrentVersionIfAny; - @Column(name = "CS_DISPLAY", nullable = true, updatable = false, length = MAX_VERSION_LENGTH) + @Column(name = "CS_DISPLAY", nullable = true, updatable = true, length = MAX_VERSION_LENGTH) private String myCodeSystemDisplayName; /** @@ -196,6 +198,7 @@ public class TermCodeSystemVersion implements Serializable { public String toString() { ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE); b.append("pid", myId); + b.append("displayName", myCodeSystemDisplayName); b.append("codeSystemResourcePid", myResourcePid); b.append("codeSystemPid", myCodeSystemPid); b.append("codeSystemVersionId", myCodeSystemVersionId); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java index a1c7b3d66d6..aafe8449e2d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConcept.java @@ -22,14 +22,21 @@ package ca.uhn.fhir.jpa.entity; import ca.uhn.fhir.context.support.IValidationSupport; import 
ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; -import ca.uhn.fhir.jpa.search.DeferConceptIndexingInterceptor; +import ca.uhn.fhir.jpa.search.DeferConceptIndexingRoutingBinder; import ca.uhn.fhir.util.ValidateUtil; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.annotations.*; +import org.hibernate.search.engine.backend.types.Projectable; +import org.hibernate.search.engine.backend.types.Searchable; +import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.PropertyBinderRef; +import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderRef; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyBinding; import org.hl7.fhir.r4.model.Coding; import javax.annotation.Nonnull; @@ -43,7 +50,7 @@ import static org.apache.commons.lang3.StringUtils.left; import static org.apache.commons.lang3.StringUtils.length; @Entity -@Indexed(interceptor = DeferConceptIndexingInterceptor.class) +@Indexed(routingBinder=@RoutingBinderRef(type = DeferConceptIndexingRoutingBinder.class)) @Table(name = "TRM_CONCEPT", uniqueConstraints = { @UniqueConstraint(name = "IDX_CONCEPT_CS_CODE", columnNames = {"CODESYSTEM_PID", "CODEVAL"}) }, indexes = { @@ -55,49 +62,59 @@ public class TermConcept implements Serializable { public static final int MAX_DESC_LENGTH = 400; private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(TermConcept.class); private static final long serialVersionUID = 1L; + @OneToMany(fetch = FetchType.LAZY, mappedBy = "myParent", cascade = {}) private List myChildren; @Column(name = "CODEVAL", nullable = false, length = MAX_CODE_LENGTH) - @Fields({@Field(name = "myCode", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "exactAnalyzer")),}) + @FullTextField(name = "myCode", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "exactAnalyzer") private String myCode; + @Temporal(TemporalType.TIMESTAMP) @Column(name = "CONCEPT_UPDATED", nullable = true) private Date myUpdated; + @ManyToOne(fetch = FetchType.LAZY) @JoinColumn(name = "CODESYSTEM_PID", referencedColumnName = "PID", foreignKey = @ForeignKey(name = "FK_CONCEPT_PID_CS_PID")) private TermCodeSystemVersion myCodeSystem; + @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false) - @Fields({@Field(name = "myCodeSystemVersionPid")}) + @GenericField(name = "myCodeSystemVersionPid") private long myCodeSystemVersionPid; + @Column(name = "DISPLAY", nullable = true, length = MAX_DESC_LENGTH) - @Fields({ - @Field(name = "myDisplay", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")), - @Field(name = "myDisplayEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")), - @Field(name = "myDisplayWordEdgeNGram", index = 
org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteWordEdgeAnalyzer")), - @Field(name = "myDisplayNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")), - @Field(name = "myDisplayPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer")) - }) + @FullTextField(name = "myDisplay", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer") + @FullTextField(name = "myDisplayEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer") + @FullTextField(name = "myDisplayWordEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteWordEdgeAnalyzer") + @FullTextField(name = "myDisplayNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer") + @FullTextField(name = "myDisplayPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer") private String myDisplay; + @OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY) - @Field(name = "PROPmyProperties", analyzer = @Analyzer(definition = "termConceptPropertyAnalyzer")) - @FieldBridge(impl = TermConceptPropertyFieldBridge.class) + @PropertyBinding(binder = @PropertyBinderRef(type = TermConceptPropertyBinder.class)) private Collection myProperties; + @OneToMany(mappedBy = "myConcept", orphanRemoval = false, fetch = FetchType.LAZY) private Collection myDesignations; - @Id() + + @Id @SequenceGenerator(name = "SEQ_CONCEPT_PID", sequenceName = "SEQ_CONCEPT_PID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_CONCEPT_PID") @Column(name = "PID") + @GenericField private Long myId; + @Column(name = "INDEX_STATUS", nullable = true) private Long myIndexStatus; - @Field(name = "myParentPids", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "conceptParentPidsAnalyzer")) + @Lob @Column(name = "PARENT_PIDS", nullable = true) + @FullTextField(name = "myParentPids", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "conceptParentPidsAnalyzer") private String myParentPids; + @OneToMany(cascade = {}, fetch = FetchType.LAZY, mappedBy = "myChild") private List myParents; + @Column(name = "CODE_SEQUENCE", nullable = true) private Integer mySequence; @@ -382,6 +399,8 @@ public class TermConcept implements Serializable { @Override public String toString() { ToStringBuilder b = new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE); + b.append("pid", myId); + b.append("csvPid", myCodeSystemVersionPid); b.append("code", myCode); b.append("display", myDisplay); if (mySequence != null) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java index 6bdec49d4fa..1ca7ad136fb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptDesignation.java @@ -131,4 +131,7 @@ public class TermConceptDesignation implements Serializable { } + public Long getPid() { + return myId; 
+ } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java index 07ce3985a8b..24af4d17256 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptParentChildLink.java @@ -20,8 +20,7 @@ package ca.uhn.fhir.jpa.entity; * #L% */ -import org.hibernate.search.annotations.Field; -import org.hibernate.search.annotations.Fields; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import javax.persistence.*; import java.io.Serializable; @@ -44,7 +43,7 @@ public class TermConceptParentChildLink implements Serializable { private TermCodeSystemVersion myCodeSystem; @Column(name = "CODESYSTEM_PID", insertable = false, updatable = false, nullable = false) - @Fields({@Field(name = "myCodeSystemVersionPid")}) + @FullTextField(name = "myCodeSystemVersionPid") private long myCodeSystemVersionPid; @ManyToOne(fetch = FetchType.LAZY, cascade = {}) diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java index 59889372d05..a736542f1bf 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptProperty.java @@ -251,4 +251,8 @@ public class TermConceptProperty implements Serializable { .append(myDisplay) .toHashCode(); } + + public Long getPid() { + return myId; + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java new file mode 100644 index 00000000000..805fea1ed40 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyBinder.java @@ -0,0 +1,77 @@ +package ca.uhn.fhir.jpa.entity; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2020 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.hibernate.search.engine.backend.document.DocumentElement; +import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement; +import org.hibernate.search.mapper.pojo.bridge.PropertyBridge; +import org.hibernate.search.mapper.pojo.bridge.binding.PropertyBindingContext; +import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.PropertyBinder; +import org.hibernate.search.mapper.pojo.bridge.runtime.PropertyBridgeWriteContext; + +import java.util.Collection; + +import static org.apache.commons.lang3.StringUtils.isNotBlank; + +/** + * Allows Hibernate Search to index individual concepts' properties + */ +public class TermConceptPropertyBinder implements PropertyBinder { + + + public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP"; + + @Override + public void bind(PropertyBindingContext thePropertyBindingContext) { + thePropertyBindingContext.dependencies().use("myKey").use("myValue"); + IndexSchemaElement indexSchemaElement = thePropertyBindingContext.indexSchemaElement(); + + //In order to support dynamic fields, we have to use field templates. We _must_ define the template at bootstrap time and cannot + //create them ad hoc. https://docs.jboss.org/hibernate/search/6.0/reference/en-US/html_single/#mapper-orm-bridge-index-field-dsl-dynamic + //I _think_ I'm doing the right thing here by indicating that everything matching this template uses this analyzer. + indexSchemaElement.fieldTemplate("propTemplate", f -> f.asString().analyzer("termConceptPropertyAnalyzer")) + .matchingPathGlob(CONCEPT_FIELD_PROPERTY_PREFIX + "*") + .multiValued(); + + + thePropertyBindingContext.bridge(new TermConceptPropertyBridge()); + } + + private class TermConceptPropertyBridge implements PropertyBridge { + + @Override + public void write(DocumentElement theDocument, Object theObject, PropertyBridgeWriteContext thePropertyBridgeWriteContext) { + + Collection properties = (Collection) theObject; + + if (properties != null) { + for (TermConceptProperty next : properties) { + theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue()); + System.out.println("Adding Prop: " + CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey() + " -- " + next.getValue()); + if (next.getType() == TermConceptPropertyTypeEnum.CODING && isNotBlank(next.getDisplay())) { + theDocument.addValue(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay()); + System.out.println("Adding multivalue Prop: " + CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey() + " -- " + next.getDisplay()); + } + } + } + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java deleted file mode 100644 index 70eec444f1f..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/entity/TermConceptPropertyFieldBridge.java +++ /dev/null @@ -1,70 +0,0 @@ -package ca.uhn.fhir.jpa.entity; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.apache.lucene.document.Document; -import org.apache.lucene.document.Field; -import org.apache.lucene.document.StringField; -import org.hibernate.search.bridge.FieldBridge; -import org.hibernate.search.bridge.LuceneOptions; -import org.hibernate.search.bridge.StringBridge; - -import java.util.Collection; - -import static org.apache.commons.lang3.StringUtils.isNotBlank; - -/** - * Allows hibernate search to index individual concepts' properties - */ -public class TermConceptPropertyFieldBridge implements FieldBridge, StringBridge { - - public static final String CONCEPT_FIELD_PROPERTY_PREFIX = "PROP"; - - /** - * Constructor - */ - public TermConceptPropertyFieldBridge() { - super(); - } - - @Override - public String objectToString(Object theObject) { - return theObject.toString(); - } - - @Override - public void set(String theName, Object theValue, Document theDocument, LuceneOptions theLuceneOptions) { - @SuppressWarnings("unchecked") - Collection properties = (Collection) theValue; - - if (properties != null) { - for (TermConceptProperty next : properties) { - theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getValue(), Field.Store.YES)); - - if (next.getType() == TermConceptPropertyTypeEnum.CODING) { - if (isNotBlank(next.getDisplay())) { - theDocument.add(new StringField(CONCEPT_FIELD_PROPERTY_PREFIX + next.getKey(), next.getDisplay(), Field.Store.YES)); - } - } - } - } - } -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProviderDstu2.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProviderDstu2.java index 5d526d8e506..538e7a9a283 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProviderDstu2.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/JpaSystemProviderDstu2.java @@ -1,17 +1,14 @@ package ca.uhn.fhir.jpa.provider; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.jpa.provider.dstu3.JpaSystemProviderDstu3; import ca.uhn.fhir.model.api.annotation.Description; import ca.uhn.fhir.model.dstu2.composite.MetaDt; import ca.uhn.fhir.model.dstu2.resource.Bundle; import ca.uhn.fhir.model.dstu2.resource.Parameters; import ca.uhn.fhir.model.dstu2.resource.Parameters.Parameter; import ca.uhn.fhir.model.primitive.BooleanDt; -import ca.uhn.fhir.model.primitive.DecimalDt; import ca.uhn.fhir.model.primitive.IntegerDt; import ca.uhn.fhir.model.primitive.StringDt; import ca.uhn.fhir.rest.annotation.IdParam; @@ -20,7 +17,6 @@ import ca.uhn.fhir.rest.annotation.OperationParam; import ca.uhn.fhir.rest.annotation.Transaction; import ca.uhn.fhir.rest.annotation.TransactionParam; import ca.uhn.fhir.rest.api.server.RequestDetails; -import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; import org.hl7.fhir.instance.model.api.IBaseBundle; import 
org.hl7.fhir.instance.model.api.IIdType; @@ -216,36 +212,6 @@ public class JpaSystemProviderDstu2 extends BaseJpaSystemProviderDstu2Plus keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest); - - Parameters retVal = new Parameters(); - for (Suggestion next : keywords) { - retVal.addParameter() - .addPart(new Parameter().setName("keyword").setValue(new StringDt(next.getTerm()))) - .addPart(new Parameter().setName("score").setValue(new DecimalDt(next.getScore()))); - } - - return retVal; - } - @Transaction public Bundle transaction(RequestDetails theRequestDetails, @TransactionParam Bundle theResources) { startRequest(((ServletRequestDetails) theRequestDetails).getServletRequest()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaSystemProviderDstu3.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaSystemProviderDstu3.java index b70add7d033..7cd6746cd48 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaSystemProviderDstu3.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/dstu3/JpaSystemProviderDstu3.java @@ -1,7 +1,6 @@ package ca.uhn.fhir.jpa.provider.dstu3; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus; @@ -224,36 +223,6 @@ public class JpaSystemProviderDstu3 extends BaseJpaSystemProviderDstu2Plus keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest); - - Parameters retVal = new Parameters(); - for (Suggestion next : keywords) { - retVal.addParameter() - .addPart(new ParametersParameterComponent().setName("keyword").setValue(new StringType(next.getTerm()))) - .addPart(new ParametersParameterComponent().setName("score").setValue(new DecimalType(next.getScore()))); - } - - return retVal; - } - @Transaction public Bundle transaction(RequestDetails theRequestDetails, @TransactionParam Bundle theResources) { startRequest(((ServletRequestDetails) theRequestDetails).getServletRequest()); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/JpaSystemProviderR4.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/JpaSystemProviderR4.java index b2dac0cc747..662efc47afd 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/JpaSystemProviderR4.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r4/JpaSystemProviderR4.java @@ -1,7 +1,6 @@ package ca.uhn.fhir.jpa.provider.r4; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus; @@ -211,38 +210,6 @@ public class JpaSystemProviderR4 extends BaseJpaSystemProviderDstu2Plus keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest); - - Parameters retVal = new Parameters(); - for (Suggestion next : keywords) { - //@formatter:off - retVal.addParameter() - .addPart(new ParametersParameterComponent().setName("keyword").setValue(new StringType(next.getTerm()))) - .addPart(new ParametersParameterComponent().setName("score").setValue(new DecimalType(next.getScore()))); - //@formatter:on - } - - return retVal; - } - /** 
* /$process-message */ diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/JpaSystemProviderR5.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/JpaSystemProviderR5.java index 27a05701f72..6cb29aa50f2 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/JpaSystemProviderR5.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/provider/r5/JpaSystemProviderR5.java @@ -1,7 +1,6 @@ package ca.uhn.fhir.jpa.provider.r5; import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.provider.BaseJpaSystemProviderDstu2Plus; @@ -213,38 +212,6 @@ public class JpaSystemProviderR5 extends BaseJpaSystemProviderDstu2Plus keywords = mySearchDao.suggestKeywords(theContext, theSearchParam, theText, theRequest); - - Parameters retVal = new Parameters(); - for (Suggestion next : keywords) { - //@formatter:off - retVal.addParameter() - .addPart(new ParametersParameterComponent().setName("keyword").setValue(new StringType(next.getTerm()))) - .addPart(new ParametersParameterComponent().setName("score").setValue(new DecimalType(next.getScore()))); - //@formatter:on - } - - return retVal; - } - /** * /$process-message */ diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingInterceptor.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingInterceptor.java deleted file mode 100644 index cfce38127ac..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingInterceptor.java +++ /dev/null @@ -1,55 +0,0 @@ -package ca.uhn.fhir.jpa.search; - -/* - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.hibernate.search.indexes.interceptor.EntityIndexingInterceptor; -import org.hibernate.search.indexes.interceptor.IndexingOverride; - -import ca.uhn.fhir.jpa.entity.TermConcept; - -public class DeferConceptIndexingInterceptor implements EntityIndexingInterceptor { - - @Override - public IndexingOverride onAdd(TermConcept theEntity) { - if (theEntity.getIndexStatus() == null) { - return IndexingOverride.SKIP; - } - - return IndexingOverride.APPLY_DEFAULT; - } - - @Override - public IndexingOverride onCollectionUpdate(TermConcept theEntity) { - return IndexingOverride.APPLY_DEFAULT; - } - - - @Override - public IndexingOverride onDelete(TermConcept theEntity) { - return IndexingOverride.APPLY_DEFAULT; - } - - @Override - public IndexingOverride onUpdate(TermConcept theEntity) { - return onAdd(theEntity); - } - -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java new file mode 100644 index 00000000000..2d0a4c5252e --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/DeferConceptIndexingRoutingBinder.java @@ -0,0 +1,53 @@ +package ca.uhn.fhir.jpa.search; + +/* + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2020 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.entity.TermConcept; +import org.hibernate.search.mapper.pojo.bridge.RoutingBridge; +import org.hibernate.search.mapper.pojo.bridge.binding.RoutingBindingContext; +import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.RoutingBinder; +import org.hibernate.search.mapper.pojo.bridge.runtime.RoutingBridgeRouteContext; +import org.hibernate.search.mapper.pojo.route.DocumentRoutes; + +public class DeferConceptIndexingRoutingBinder implements RoutingBinder { + @Override + public void bind(RoutingBindingContext theRoutingBindingContext) { + theRoutingBindingContext.dependencies().use("myIndexStatus"); + + theRoutingBindingContext.bridge(TermConcept.class, new TermConceptBridge()); + } + + private class TermConceptBridge implements RoutingBridge { + @Override + public void route(DocumentRoutes theDocumentRoutes, Object theO, TermConcept theTermConcept, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + if (theTermConcept.getIndexStatus() == null) { + theDocumentRoutes.notIndexed(); + } else { + theDocumentRoutes.addRoute(); + } + } + + @Override + public void previousRoutes(DocumentRoutes theDocumentRoutes, Object theO, TermConcept theTermConcept, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + theDocumentRoutes.addRoute(); + } + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiLuceneAnalysisConfigurer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiLuceneAnalysisConfigurer.java new file mode 100644 index 00000000000..4b0e0111ab8 --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/HapiLuceneAnalysisConfigurer.java @@ -0,0 +1,91 @@ +package ca.uhn.fhir.jpa.search; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2020 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +import com.sun.xml.bind.api.impl.NameConverter; +import org.apache.lucene.analysis.core.KeywordTokenizerFactory; +import org.apache.lucene.analysis.core.LowerCaseFilterFactory; +import org.apache.lucene.analysis.core.StopFilterFactory; +import org.apache.lucene.analysis.core.WhitespaceTokenizerFactory; +import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory; +import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory; +import org.apache.lucene.analysis.ngram.NGramFilterFactory; +import org.apache.lucene.analysis.pattern.PatternTokenizerFactory; +import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory; +import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory; +import org.apache.lucene.analysis.standard.StandardTokenizerFactory; +import org.hibernate.search.backend.lucene.analysis.LuceneAnalysisConfigurationContext; +import org.hibernate.search.backend.lucene.analysis.LuceneAnalysisConfigurer; +import org.springframework.stereotype.Component; +import org.springframework.stereotype.Service; + +/** + * Factory for defining the analysers. 
+ */ +@Component +public class HapiLuceneAnalysisConfigurer implements LuceneAnalysisConfigurer { + + @Override + public void configure(LuceneAnalysisConfigurationContext theLuceneCtx) { + theLuceneCtx.analyzer("autocompleteEdgeAnalyzer").custom() + .tokenizer(PatternTokenizerFactory.class).param("pattern", "(.*)").param("group", "1") + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(StopFilterFactory.class) + .tokenFilter(EdgeNGramFilterFactory.class) + .param("minGramSize", "3") + .param("maxGramSize", "50"); + + theLuceneCtx.analyzer("autocompletePhoneticAnalyzer").custom() + .tokenizer(StandardTokenizerFactory.class) + .tokenFilter(StopFilterFactory.class) + .tokenFilter(PhoneticFilterFactory.class).param("encoder", "DoubleMetaphone") + .tokenFilter(SnowballPorterFilterFactory.class).param("language", "English"); + + theLuceneCtx.analyzer("autocompleteNGramAnalyzer").custom() + .tokenizer(StandardTokenizerFactory.class) + .tokenFilter(WordDelimiterFilterFactory.class) + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(NGramFilterFactory.class) + .param("minGramSize", "3") + .param("maxGramSize", "20"); + + theLuceneCtx.analyzer("autocompleteWordEdgeAnalyzer").custom() + .tokenizer(StandardTokenizerFactory.class) + .tokenFilter(LowerCaseFilterFactory.class) + .tokenFilter(StopFilterFactory.class) + .tokenFilter(EdgeNGramFilterFactory.class) + .param("minGramSize", "2") + .param("maxGramSize", "20"); + + theLuceneCtx.analyzer("standardAnalyzer").custom() + .tokenizer(StandardTokenizerFactory.class) + .tokenFilter(LowerCaseFilterFactory.class); + + theLuceneCtx.analyzer("exactAnalyzer").custom() + .tokenizer(KeywordTokenizerFactory.class); + + theLuceneCtx.analyzer("conceptParentPidsAnalyzer").custom() + .tokenizer(WhitespaceTokenizerFactory.class); + + theLuceneCtx.analyzer("termConceptPropertyAnalyzer").custom() + .tokenizer(WhitespaceTokenizerFactory.class); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/LuceneSearchMappingFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/LuceneSearchMappingFactory.java deleted file mode 100644 index bc3bcec38e5..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/LuceneSearchMappingFactory.java +++ /dev/null @@ -1,78 +0,0 @@ -package ca.uhn.fhir.jpa.search; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.apache.lucene.analysis.core.*; -import org.apache.lucene.analysis.miscellaneous.WordDelimiterFilterFactory; -import org.apache.lucene.analysis.ngram.EdgeNGramFilterFactory; -import org.apache.lucene.analysis.ngram.NGramFilterFactory; -import org.apache.lucene.analysis.pattern.PatternTokenizerFactory; -import org.apache.lucene.analysis.phonetic.PhoneticFilterFactory; -import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory; -import org.apache.lucene.analysis.standard.StandardFilterFactory; -import org.apache.lucene.analysis.standard.StandardTokenizerFactory; -import org.hibernate.search.annotations.Factory; -import org.hibernate.search.cfg.SearchMapping; - -/** - * Factory for defining the analysers. - */ -public class LuceneSearchMappingFactory { - @Factory - public SearchMapping getSearchMapping() { - SearchMapping mapping = new SearchMapping(); - - mapping.analyzerDef("autocompleteEdgeAnalyzer", PatternTokenizerFactory.class) - .tokenizerParam("pattern", "(.*)") - .tokenizerParam("group", "1") - .filter(LowerCaseFilterFactory.class) - .filter(StopFilterFactory.class) - .filter(EdgeNGramFilterFactory.class) - .param("minGramSize", "3") - .param("maxGramSize", "50") - .analyzerDef("autocompleteWordEdgeAnalyzer", StandardTokenizerFactory.class) - .filter(LowerCaseFilterFactory.class) - .filter(StopFilterFactory.class) - .filter(EdgeNGramFilterFactory.class) - .param("minGramSize", "3") - .param("maxGramSize", "20") - .analyzerDef("autocompletePhoneticAnalyzer", StandardTokenizerFactory.class) - .filter(StandardFilterFactory.class) - .filter(StopFilterFactory.class) - .filter(PhoneticFilterFactory.class) - .param("encoder", "DoubleMetaphone") - .filter(SnowballPorterFilterFactory.class) - .param("language", "English") - .analyzerDef("autocompleteNGramAnalyzer", StandardTokenizerFactory.class) - .filter(WordDelimiterFilterFactory.class) - .filter(LowerCaseFilterFactory.class) - .filter(NGramFilterFactory.class) - .param("minGramSize", "3") - .param("maxGramSize", "20") - .analyzerDef("standardAnalyzer", StandardTokenizerFactory.class) - .filter(LowerCaseFilterFactory.class) - .analyzerDef("exactAnalyzer", KeywordTokenizerFactory.class) - .analyzerDef("conceptParentPidsAnalyzer", WhitespaceTokenizerFactory.class) - .analyzerDef("termConceptPropertyAnalyzer", WhitespaceTokenizerFactory.class); - - return mapping; - } -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java index a0d0adb9a16..806375c838d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/SearchBuilder.java @@ -31,7 +31,7 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.config.HapiFhirLocalContainerEntityManagerFactoryBean; -import ca.uhn.fhir.jpa.config.HibernateDialectProvider; +import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.IResultIterator; @@ -173,7 +173,7 @@ public class SearchBuilder implements ISearchBuilder { @Autowired private SqlObjectFactory mySqlBuilderFactory; @Autowired - private HibernateDialectProvider myDialectProvider; + private HibernatePropertiesProvider 
myDialectProvider; /** * Constructor diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java index 6d92b34568c..bdeb7828f2e 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/predicate/CoordsPredicateBuilder.java @@ -25,7 +25,6 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryBuilder; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.jpa.util.CoordCalculator; -import ca.uhn.fhir.jpa.util.SearchBox; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.model.dstu2.resource.Location; import ca.uhn.fhir.rest.param.QuantityParam; @@ -35,6 +34,7 @@ import com.healthmarketscience.sqlbuilder.BinaryCondition; import com.healthmarketscience.sqlbuilder.ComboCondition; import com.healthmarketscience.sqlbuilder.Condition; import com.healthmarketscience.sqlbuilder.dbspec.basic.DbColumn; +import org.hibernate.search.engine.spatial.GeoBoundingBox; import static org.apache.commons.lang3.StringUtils.isBlank; @@ -115,7 +115,7 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder { double latitudeDegrees = Double.parseDouble(latitudeValue); double longitudeDegrees = Double.parseDouble(longitudeValue); - SearchBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm); + GeoBoundingBox box = CoordCalculator.getBox(latitudeDegrees, longitudeDegrees, distanceKm); latitudePredicate = theFrom.createLatitudePredicateFromBox(box); longitudePredicate = theFrom.createLongitudePredicateFromBox(box); } @@ -132,23 +132,23 @@ public class CoordsPredicateBuilder extends BaseSearchParamPredicateBuilder { return BinaryCondition.equalTo(myColumnLongitude, generatePlaceholder(theLongitudeValue)); } - public Condition createLatitudePredicateFromBox(SearchBox theBox) { + public Condition createLatitudePredicateFromBox(GeoBoundingBox theBox) { return ComboCondition.and( - BinaryCondition.greaterThanOrEq(myColumnLatitude, generatePlaceholder(theBox.getSouthWest().getLatitude())), - BinaryCondition.lessThanOrEq(myColumnLatitude, generatePlaceholder(theBox.getNorthEast().getLatitude())) + BinaryCondition.greaterThanOrEq(myColumnLatitude, generatePlaceholder(theBox.bottomRight().latitude())), + BinaryCondition.lessThanOrEq(myColumnLatitude, generatePlaceholder(theBox.topLeft().latitude())) ); } - public Condition createLongitudePredicateFromBox(SearchBox theBox) { - if (theBox.crossesAntiMeridian()) { + public Condition createLongitudePredicateFromBox(GeoBoundingBox theBox) { + if (theBox.bottomRight().longitude() < theBox.topLeft().longitude()) { return ComboCondition.or( - BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getNorthEast().getLongitude())), - BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getSouthWest().getLongitude())) + BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.bottomRight().longitude())), + BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.topLeft().longitude())) ); } return ComboCondition.and( - BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getSouthWest().getLongitude())), - 
BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.getNorthEast().getLongitude())) + BinaryCondition.greaterThanOrEq(myColumnLongitude, generatePlaceholder(theBox.topLeft().longitude())), + BinaryCondition.lessThanOrEq(myColumnLongitude, generatePlaceholder(theBox.bottomRight().longitude())) ); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java index 667539ca770..c8ba5170c9f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilder.java @@ -22,7 +22,7 @@ package ca.uhn.fhir.jpa.search.builder.sql; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.config.HibernateDialectProvider; +import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.builder.QueryStack; @@ -101,7 +101,7 @@ public class SearchQueryBuilder { /** * Constructor */ - public SearchQueryBuilder(FhirContext theFhirContext, ModelConfig theModelConfig, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernateDialectProvider theDialectProvider, boolean theCountQuery) { + public SearchQueryBuilder(FhirContext theFhirContext, ModelConfig theModelConfig, PartitionSettings thePartitionSettings, RequestPartitionId theRequestPartitionId, String theResourceType, SqlObjectFactory theSqlBuilderFactory, HibernatePropertiesProvider theDialectProvider, boolean theCountQuery) { this(theFhirContext, theModelConfig, thePartitionSettings, theRequestPartitionId, theResourceType, theSqlBuilderFactory, UUID.randomUUID().toString() + "-", theDialectProvider.getDialect(), theCountQuery, new ArrayList<>()); } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java index 1278048792f..778ea971d47 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchHibernatePropertiesBuilder.java @@ -20,29 +20,48 @@ package ca.uhn.fhir.jpa.search.elastic; * #L% */ +import ca.uhn.fhir.context.ConfigurationException; +import ca.uhn.fhir.jpa.search.lastn.ElasticsearchRestClientFactory; import org.apache.commons.lang3.StringUtils; -import org.hibernate.search.cfg.Environment; -import org.hibernate.search.elasticsearch.cfg.ElasticsearchEnvironment; -import org.hibernate.search.elasticsearch.cfg.ElasticsearchIndexStatus; -import org.hibernate.search.elasticsearch.cfg.IndexSchemaManagementStrategy; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.indices.PutIndexTemplateRequest; +import org.elasticsearch.common.settings.Settings; +import org.hibernate.search.backend.elasticsearch.index.IndexStatus; +import 
org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.automaticindexing.session.AutomaticIndexingSynchronizationStrategyNames; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; +import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchBackendSettings; +import org.hibernate.search.backend.elasticsearch.cfg.ElasticsearchIndexSettings; +import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName; +import org.slf4j.Logger; +import java.io.IOException; +import java.util.Arrays; import java.util.Properties; +import static org.slf4j.LoggerFactory.getLogger; + /** * This class is used to inject appropriate properties into a hibernate * Properties object being used to create an entitymanager for a HAPI - * FHIR JPA server. + * FHIR JPA server. This class also injects a starter template into the ES cluster. */ public class ElasticsearchHibernatePropertiesBuilder { + private static final Logger ourLog = getLogger(ElasticsearchHibernatePropertiesBuilder.class); + + + private IndexStatus myRequiredIndexStatus = IndexStatus.YELLOW; + private SchemaManagementStrategyName myIndexSchemaManagementStrategy = SchemaManagementStrategyName.CREATE; - private ElasticsearchIndexStatus myRequiredIndexStatus = ElasticsearchIndexStatus.YELLOW; private String myRestUrl; private String myUsername; private String myPassword; - private IndexSchemaManagementStrategy myIndexSchemaManagementStrategy = IndexSchemaManagementStrategy.CREATE; private long myIndexManagementWaitTimeoutMillis = 10000L; - private boolean myDebugRefreshAfterWrite = false; + private String myDebugSyncStrategy = AutomaticIndexingSynchronizationStrategyNames.ASYNC; private boolean myDebugPrettyPrintJsonLog = false; + private String myProtocol; public ElasticsearchHibernatePropertiesBuilder setUsername(String theUsername) { myUsername = theUsername; @@ -56,36 +75,35 @@ public class ElasticsearchHibernatePropertiesBuilder { public void apply(Properties theProperties) { - // Don't use the Lucene properties as they conflict - theProperties.remove("hibernate.search.model_mapping"); - // the below properties are used for ElasticSearch integration - theProperties.put("hibernate.search.default." + Environment.INDEX_MANAGER_IMPL_NAME, "elasticsearch"); - theProperties.put("hibernate.search." + ElasticsearchEnvironment.ANALYSIS_DEFINITION_PROVIDER, ElasticsearchMappingProvider.class.getName()); + theProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "elasticsearch"); + + + theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.ANALYSIS_CONFIGURER), HapiElasticsearchAnalysisConfigurer.class.getName()); + + theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.HOSTS), myRestUrl); + theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PROTOCOL), myProtocol); - theProperties.put("hibernate.search.default.elasticsearch.host", myRestUrl); if (StringUtils.isNotBlank(myUsername)) { - theProperties.put("hibernate.search.default.elasticsearch.username", myUsername); + theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.USERNAME), myUsername); } if (StringUtils.isNotBlank(myPassword)) { - theProperties.put("hibernate.search.default.elasticsearch.password", myPassword); + theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.PASSWORD), myPassword); } - - theProperties.put("hibernate.search.default." 
+ ElasticsearchEnvironment.INDEX_SCHEMA_MANAGEMENT_STRATEGY, myIndexSchemaManagementStrategy.getExternalName()); - theProperties.put("hibernate.search.default." + ElasticsearchEnvironment.INDEX_MANAGEMENT_WAIT_TIMEOUT, Long.toString(myIndexManagementWaitTimeoutMillis)); - theProperties.put("hibernate.search.default." + ElasticsearchEnvironment.REQUIRED_INDEX_STATUS, myRequiredIndexStatus.getElasticsearchString()); - + theProperties.put(HibernateOrmMapperSettings.SCHEMA_MANAGEMENT_STRATEGY, myIndexSchemaManagementStrategy.externalRepresentation()); + theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS_WAIT_TIMEOUT), Long.toString(myIndexManagementWaitTimeoutMillis)); + theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.SCHEMA_MANAGEMENT_MINIMAL_REQUIRED_STATUS), myRequiredIndexStatus.externalRepresentation()); // Need the mapping to be dynamic because of terminology indexes. - theProperties.put("hibernate.search.default.elasticsearch.dynamic_mapping", "true"); - - + theProperties.put(BackendSettings.backendKey(ElasticsearchIndexSettings.DYNAMIC_MAPPING), "true"); // Only for unit tests - theProperties.put("hibernate.search.default." + ElasticsearchEnvironment.REFRESH_AFTER_WRITE, Boolean.toString(myDebugRefreshAfterWrite)); - theProperties.put("hibernate.search." + ElasticsearchEnvironment.LOG_JSON_PRETTY_PRINTING, Boolean.toString(myDebugPrettyPrintJsonLog)); + theProperties.put(HibernateOrmMapperSettings.AUTOMATIC_INDEXING_SYNCHRONIZATION_STRATEGY, myDebugSyncStrategy); + theProperties.put(BackendSettings.backendKey(ElasticsearchBackendSettings.LOG_JSON_PRETTY_PRINTING), Boolean.toString(myDebugPrettyPrintJsonLog)); + + injectStartupTemplate(myProtocol, myRestUrl, myUsername, myPassword); } - public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(ElasticsearchIndexStatus theRequiredIndexStatus) { + public ElasticsearchHibernatePropertiesBuilder setRequiredIndexStatus(IndexStatus theRequiredIndexStatus) { myRequiredIndexStatus = theRequiredIndexStatus; return this; } @@ -95,7 +113,12 @@ public class ElasticsearchHibernatePropertiesBuilder { return this; } - public ElasticsearchHibernatePropertiesBuilder setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy theIndexSchemaManagementStrategy) { + public ElasticsearchHibernatePropertiesBuilder setProtocol(String theProtocol) { + myProtocol = theProtocol; + return this; + } + + public ElasticsearchHibernatePropertiesBuilder setIndexSchemaManagementStrategy(SchemaManagementStrategyName theIndexSchemaManagementStrategy) { myIndexSchemaManagementStrategy = theIndexSchemaManagementStrategy; return this; } @@ -105,15 +128,40 @@ public class ElasticsearchHibernatePropertiesBuilder { return this; } - public ElasticsearchHibernatePropertiesBuilder setDebugRefreshAfterWrite(boolean theDebugRefreshAfterWrite) { - myDebugRefreshAfterWrite = theDebugRefreshAfterWrite; + public ElasticsearchHibernatePropertiesBuilder setDebugIndexSyncStrategy(String theSyncStrategy) { + myDebugSyncStrategy = theSyncStrategy; return this; } + public ElasticsearchHibernatePropertiesBuilder setDebugPrettyPrintJsonLog(boolean theDebugPrettyPrintJsonLog) { myDebugPrettyPrintJsonLog = theDebugPrettyPrintJsonLog; return this; } + /** + * At startup time, injects a template into the elasticsearch cluster, which is needed for handling large ngram diffs. 
+ * TODO GGG HS: In HS6.1, we should have a native way of performing index settings manipulation at bootstrap time, so this should + * eventually be removed in favour of whatever solution they come up with. + */ + private void injectStartupTemplate(String theProtocol, String theHostAndPort, String theUsername, String thePassword) { + PutIndexTemplateRequest ngramTemplate = new PutIndexTemplateRequest("ngram-template") + .patterns(Arrays.asList("resourcetable-*", "termconcept-*")) + .settings(Settings.builder().put("index.max_ngram_diff", 50)); + int colonIndex = theHostAndPort.indexOf(":"); + String host = theHostAndPort.substring(0, colonIndex); + Integer port = Integer.valueOf(theHostAndPort.substring(colonIndex + 1)); + String qualifiedHost = theProtocol + "://" + host; + + try { + RestHighLevelClient elasticsearchHighLevelRestClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(qualifiedHost, port, theUsername, thePassword); + ourLog.info("Adding starter template for large ngram diffs"); + AcknowledgedResponse acknowledgedResponse = elasticsearchHighLevelRestClient.indices().putTemplate(ngramTemplate, RequestOptions.DEFAULT); + assert acknowledgedResponse.isAcknowledged(); + } catch (IOException theE) { + ourLog.error("Failed to add the ngram index template to the Elasticsearch cluster", theE); + throw new ConfigurationException("Couldn't connect to the elasticsearch server to create necessary templates. Ensure the Elasticsearch user has permissions to create templates."); + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchMappingProvider.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchMappingProvider.java deleted file mode 100644 index 643a6bebed1..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/ElasticsearchMappingProvider.java +++ /dev/null @@ -1,69 +0,0 @@ -package ca.uhn.fhir.jpa.search.elastic; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.hibernate.search.elasticsearch.analyzer.definition.ElasticsearchAnalysisDefinitionProvider; -import org.hibernate.search.elasticsearch.analyzer.definition.ElasticsearchAnalysisDefinitionRegistryBuilder; - -public class ElasticsearchMappingProvider implements ElasticsearchAnalysisDefinitionProvider { - - @Override - public void register(ElasticsearchAnalysisDefinitionRegistryBuilder builder) { - builder.analyzer("autocompleteEdgeAnalyzer") - .withTokenizer("pattern_all") - .withTokenFilters("lowercase", "stop", "edgengram_3_50"); - builder.tokenizer("pattern_all").type("pattern").param("pattern", "(.*)").param("group", "1"); - builder.tokenFilter("edgengram_3_50") - .type("edgeNGram") - .param("min_gram", "3") - .param("max_gram", "50"); - - builder.analyzer("autocompleteWordEdgeAnalyzer") - .withTokenizer("standard") - .withTokenFilters("lowercase", "stop", "wordedgengram_3_50"); - builder.tokenFilter("wordedgengram_3_50") - .type("edgeNGram") - .param("min_gram", "3") - .param("max_gram", "20"); - - builder.analyzer("autocompletePhoneticAnalyzer") - .withTokenizer("standard") - .withTokenFilters("standard", "stop", "snowball_english"); - builder.tokenFilter("snowball_english").type("snowball").param("language", "English"); - - builder.analyzer("autocompleteNGramAnalyzer") - .withTokenizer("standard") - .withTokenFilters("word_delimiter", "lowercase", "ngram_3_20"); - builder.tokenFilter("ngram_3_20") - .type("nGram") - .param("min_gram", "3") - .param("max_gram", "20"); - - builder.analyzer("standardAnalyzer").withTokenizer("standard").withTokenFilters("lowercase"); - - builder.analyzer("exactAnalyzer").withTokenizer("keyword"); - - builder.analyzer("conceptParentPidsAnalyzer").withTokenizer("whitespace"); - - builder.analyzer("termConceptPropertyAnalyzer").withTokenizer("whitespace"); - - } -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/HapiElasticsearchAnalysisConfigurer.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/HapiElasticsearchAnalysisConfigurer.java new file mode 100644 index 00000000000..41d6846dcfd --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/elastic/HapiElasticsearchAnalysisConfigurer.java @@ -0,0 +1,87 @@ +package ca.uhn.fhir.jpa.search.elastic; + +/*- + * #%L + * HAPI FHIR JPA Server + * %% + * Copyright (C) 2014 - 2020 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurationContext; +import org.hibernate.search.backend.elasticsearch.analysis.ElasticsearchAnalysisConfigurer; + +public class HapiElasticsearchAnalysisConfigurer implements ElasticsearchAnalysisConfigurer{ + + @Override + public void configure(ElasticsearchAnalysisConfigurationContext theConfigCtx) { + + theConfigCtx.analyzer("autocompleteEdgeAnalyzer").custom() + .tokenizer("pattern_all") + .tokenFilters("lowercase", "stop", "edgengram_3_50"); + + theConfigCtx.tokenizer("pattern_all") + .type("pattern") + .param("pattern", "(.*)") + .param("group", "1"); + + theConfigCtx.tokenFilter("edgengram_3_50") + .type("edgeNGram") + .param("min_gram", "3") + .param("max_gram", "50"); + + + theConfigCtx.analyzer("autocompleteWordEdgeAnalyzer").custom() + .tokenizer("standard") + .tokenFilters("lowercase", "stop", "wordedgengram_3_50"); + + theConfigCtx.tokenFilter("wordedgengram_3_50") + .type("edgeNGram") + .param("min_gram", "2") + .param("max_gram", "20"); + + theConfigCtx.analyzer("autocompletePhoneticAnalyzer").custom() + .tokenizer("standard") + .tokenFilters("stop", "snowball_english"); + + theConfigCtx.tokenFilter("snowball_english") + .type("snowball") + .param("language", "English"); + + theConfigCtx.analyzer("autocompleteNGramAnalyzer").custom() + .tokenizer("standard") + .tokenFilters("word_delimiter", "lowercase", "ngram_3_20"); + + theConfigCtx.tokenFilter("ngram_3_20") + .type("nGram") + .param("min_gram", "3") + .param("max_gram", "20"); + + + theConfigCtx.analyzer("standardAnalyzer").custom() + .tokenizer("standard") + .tokenFilters("lowercase"); + + theConfigCtx.analyzer("exactAnalyzer") + .custom() + .tokenizer("keyword"); + + theConfigCtx.analyzer("conceptParentPidsAnalyzer").custom() + .tokenizer("whitespace"); + + theConfigCtx.analyzer("termConceptPropertyAnalyzer").custom() + .tokenizer("whitespace"); + } +} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java index 93684520418..a221a754b6f 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchRestClientFactory.java @@ -27,20 +27,38 @@ import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.CredentialsProvider; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.message.BasicHeader; -import org.shadehapi.elasticsearch.client.RestClient; -import org.shadehapi.elasticsearch.client.RestClientBuilder; -import org.shadehapi.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.client.RestHighLevelClient; public class ElasticsearchRestClientFactory { + + private static String determineScheme(String theHostname) { + int schemeIdx = theHostname.indexOf("://"); + if (schemeIdx > 0) { + return theHostname.substring(0, schemeIdx); + } else { + return "http"; + } + } + + private static String stripHostOfScheme(String theHostname) { + int schemeIdx = theHostname.indexOf("://"); + if (schemeIdx > 0) { + return theHostname.substring(schemeIdx + 3); + } else { + return theHostname; + } + } + static public RestHighLevelClient 
createElasticsearchHighLevelRestClient(String theHostname, int thePort, String theUsername, String thePassword) { final CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); credentialsProvider.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(theUsername, thePassword)); - RestClientBuilder clientBuilder = RestClient.builder( - new HttpHost(theHostname, thePort)) + new HttpHost(stripHostOfScheme(theHostname), thePort, determineScheme(theHostname))) .setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder .setDefaultCredentialsProvider(credentialsProvider)); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java index 11929cfe51e..54a28d6cf3a 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/lastn/ElasticsearchSvcImpl.java @@ -37,40 +37,40 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.Validate; -import org.shadehapi.elasticsearch.action.DocWriteResponse; -import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexRequest; -import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexResponse; -import org.shadehapi.elasticsearch.action.admin.indices.get.GetIndexRequest; -import org.shadehapi.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.shadehapi.elasticsearch.action.index.IndexRequest; -import org.shadehapi.elasticsearch.action.index.IndexResponse; -import org.shadehapi.elasticsearch.action.search.SearchRequest; -import org.shadehapi.elasticsearch.action.search.SearchResponse; -import org.shadehapi.elasticsearch.client.RequestOptions; -import org.shadehapi.elasticsearch.client.RestHighLevelClient; -import org.shadehapi.elasticsearch.common.xcontent.XContentType; -import org.shadehapi.elasticsearch.index.query.BoolQueryBuilder; -import org.shadehapi.elasticsearch.index.query.MatchQueryBuilder; -import org.shadehapi.elasticsearch.index.query.QueryBuilders; -import org.shadehapi.elasticsearch.index.query.RangeQueryBuilder; -import org.shadehapi.elasticsearch.index.reindex.DeleteByQueryRequest; -import org.shadehapi.elasticsearch.search.SearchHit; -import org.shadehapi.elasticsearch.search.SearchHits; -import org.shadehapi.elasticsearch.search.aggregations.AggregationBuilder; -import org.shadehapi.elasticsearch.search.aggregations.AggregationBuilders; -import org.shadehapi.elasticsearch.search.aggregations.Aggregations; -import org.shadehapi.elasticsearch.search.aggregations.BucketOrder; -import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; -import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; -import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.ParsedComposite; -import org.shadehapi.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; -import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.ParsedTerms; -import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.Terms; -import org.shadehapi.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; -import 
org.shadehapi.elasticsearch.search.aggregations.metrics.tophits.ParsedTopHits; -import org.shadehapi.elasticsearch.search.aggregations.support.ValueType; -import org.shadehapi.elasticsearch.search.builder.SearchSourceBuilder; -import org.shadehapi.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.client.RequestOptions; +import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.client.indices.CreateIndexRequest; +import org.elasticsearch.client.indices.CreateIndexResponse; +import org.elasticsearch.client.indices.GetIndexRequest; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.reindex.DeleteByQueryRequest; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.composite.ParsedComposite; +import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; +import org.elasticsearch.search.aggregations.bucket.terms.ParsedTerms; +import org.elasticsearch.search.aggregations.bucket.terms.Terms; +import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; +import org.elasticsearch.search.aggregations.metrics.ParsedTopHits; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.sort.SortOrder; import org.springframework.beans.factory.annotation.Autowired; import java.io.BufferedReader; @@ -120,9 +120,16 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { private final RestHighLevelClient myRestHighLevelClient; private final ObjectMapper objectMapper = new ObjectMapper(); + @Autowired private PartitionSettings myPartitionSettings; + //This constructor used to inject a dummy partitionsettings in test. 
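+	// For illustration only (hypothetical values), a test can construct the service directly rather than autowiring the partition settings:
+	//   ElasticsearchSvcImpl svc = new ElasticsearchSvcImpl(new PartitionSettings(), "http://localhost", 9200, "elastic", "changeme");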
+ public ElasticsearchSvcImpl(PartitionSettings thePartitionSettings, String theHostname, int thePort, String theUsername, String thePassword) { + this(theHostname, thePort, theUsername, thePassword); + this.myPartitionSettings = thePartitionSettings; + } + public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) { myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword); @@ -176,8 +183,7 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } private boolean indexExists(String theIndexName) throws IOException { - GetIndexRequest request = new GetIndexRequest(); - request.indices(theIndexName); + GetIndexRequest request = new GetIndexRequest(theIndexName); return myRestHighLevelClient.indices().exists(request, RequestOptions.DEFAULT); } @@ -278,14 +284,14 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { } private TermsAggregationBuilder createObservationCodeAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) { - TermsAggregationBuilder observationCodeCodeAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_CODE, ValueType.STRING).field(OBSERVATION_CODEVALUE_FIELD_NAME); + TermsAggregationBuilder observationCodeCodeAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_CODE).field(OBSERVATION_CODEVALUE_FIELD_NAME); observationCodeCodeAggregationBuilder.order(BucketOrder.key(true)); // Top Hits Aggregation observationCodeCodeAggregationBuilder.subAggregation(AggregationBuilders.topHits(MOST_RECENT_EFFECTIVE) .sort(OBSERVATION_EFFECTIVEDTM_FIELD_NAME, SortOrder.DESC) .fetchSource(theTopHitsInclude, null).size(theMaxNumberObservationsPerCode)); observationCodeCodeAggregationBuilder.size(10000); - TermsAggregationBuilder observationCodeSystemAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_SYSTEM, ValueType.STRING).field(OBSERVATION_CODESYSTEM_FIELD_NAME); + TermsAggregationBuilder observationCodeSystemAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_SYSTEM).field(OBSERVATION_CODESYSTEM_FIELD_NAME); observationCodeSystemAggregationBuilder.order(BucketOrder.key(true)); observationCodeSystemAggregationBuilder.subAggregation(observationCodeCodeAggregationBuilder); return observationCodeSystemAggregationBuilder; @@ -728,8 +734,6 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc { private IndexRequest createIndexRequest(String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) { IndexRequest request = new IndexRequest(theIndexName); request.id(theDocumentId); - request.type(theDocumentType); - request.source(theObservationDocument, XContentType.JSON); return request; } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java index 1494f693e57..12a8322c5b8 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/search/reindex/ResourceReindexingSvcImpl.java @@ -46,7 +46,6 @@ import com.google.common.annotations.VisibleForTesting; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.concurrent.BasicThreadFactory; import org.apache.commons.lang3.time.DateUtils; -import org.hibernate.search.util.impl.Executors; import
org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r4.model.InstantType; import org.quartz.JobExecutionContext; @@ -165,7 +164,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { public void initExecutor() { // Create the threadpool executor used for reindex jobs int reindexThreadCount = myDaoConfig.getReindexThreadCount(); - RejectedExecutionHandler rejectHandler = new Executors.BlockPolicy(); + RejectedExecutionHandler rejectHandler = new BlockPolicy(); myTaskExecutor = new ThreadPoolExecutor(0, reindexThreadCount, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<>(100), @@ -173,6 +172,30 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { rejectHandler ); } + /** + * A handler for rejected tasks that will have the caller block until space is available. + * This was borrowed from the old Hibernate Search (5.X.X) implementation, as it has been removed in HS6. We can probably come up with a better solution though. + */ + public static class BlockPolicy implements RejectedExecutionHandler { + + /** + * Puts the Runnable into the blocking queue, effectively blocking the delegating thread until space is available. + * + * @param r the runnable task requested to be executed + * @param e the executor attempting to execute this task + */ + @Override + public void rejectedExecution(Runnable r, ThreadPoolExecutor e) { + try { + e.getQueue().put(r); + } + catch (InterruptedException e1) { + ourLog.error("Interrupted exception for task: {}", r, e1); + Thread.currentThread().interrupt(); + } + } + } + public void scheduleJob() { ScheduledJobDefinition jobDetail = new ScheduledJobDefinition(); @@ -545,6 +568,7 @@ public class ResourceReindexingSvcImpl implements IResourceReindexingSvc { } doReindex(resourceTable, resource); + return null; } catch (Exception e) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java index a7120617413..fc894e424a4 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/BaseTermReadSvcImpl.java @@ -31,6 +31,7 @@ import ca.uhn.fhir.jpa.api.dao.IDao; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoCodeSystem; import ca.uhn.fhir.jpa.api.model.TranslationQuery; import ca.uhn.fhir.jpa.api.model.TranslationRequest; +import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; @@ -56,7 +57,7 @@ import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; import ca.uhn.fhir.jpa.entity.TermConceptProperty; -import ca.uhn.fhir.jpa.entity.TermConceptPropertyFieldBridge; +import ca.uhn.fhir.jpa.entity.TermConceptPropertyBinder; import ca.uhn.fhir.jpa.entity.TermConceptPropertyTypeEnum; import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; @@ -98,18 +99,17 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.time.DateUtils; import org.apache.lucene.index.Term; -import org.apache.lucene.queries.TermsQuery; -import org.apache.lucene.search.BooleanClause; -import org.apache.lucene.search.BooleanQuery;
-import org.apache.lucene.search.Query; import org.apache.lucene.search.RegexpQuery; -import org.apache.lucene.search.TermQuery; import org.hibernate.ScrollMode; import org.hibernate.ScrollableResults; -import org.hibernate.search.jpa.FullTextEntityManager; -import org.hibernate.search.jpa.FullTextQuery; -import org.hibernate.search.query.dsl.BooleanJunction; -import org.hibernate.search.query.dsl.QueryBuilder; +import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension; +import org.hibernate.search.backend.lucene.LuceneExtension; +import org.hibernate.search.engine.search.predicate.dsl.BooleanPredicateClausesStep; +import org.hibernate.search.engine.search.predicate.dsl.PredicateFinalStep; +import org.hibernate.search.engine.search.predicate.dsl.SearchPredicateFactory; +import org.hibernate.search.engine.search.query.SearchQuery; +import org.hibernate.search.mapper.orm.Search; +import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.common.hapi.validation.support.CommonCodeSystemsTerminologyService; import org.hl7.fhir.common.hapi.validation.support.InMemoryTerminologyServerValidationSupport; import org.hl7.fhir.exceptions.FHIRException; @@ -252,6 +252,14 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { private volatile IValidationSupport myJpaValidationSupport; private volatile IValidationSupport myValidationSupport; + //We need this bean so we can tell which mode hibernate search is running in. + @Autowired + private HibernatePropertiesProvider myHibernatePropertiesProvider; + + private boolean isFullTextSetToUseElastic() { + return "elasticsearch".equalsIgnoreCase(myHibernatePropertiesProvider.getHibernateSearchBackend()); + } + @Override public boolean isCodeSystemSupported(ValidationSupportContext theValidationSupportContext, String theSystem) { TermCodeSystemVersion cs = getCurrentCodeSystemVersion(theSystem); @@ -538,6 +546,8 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(theTermValueSet.getId(), displayValue); wasFilteredResult = true; } else { + // TODO JA HS: I'm pretty sure we are overfetching here. test says offset 3, count 4, but we are fetching index 3 -> 10 here, grabbing 7 concepts. + //Specifically this test testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange conceptViews = myTermValueSetConceptViewDao.findByTermValueSetId(offset, toIndex, theTermValueSet.getId()); theAccumulator.consumeSkipCount(offset); if (theAdd) { @@ -899,8 +909,8 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } else { csv = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCs.getPid(), includeOrExcludeVersion); } - FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); + SearchSession searchSession = Search.session(myEntityManager); /* * If FullText searching is not enabled, we can handle only basic expansions * since we're going to do it without the database. @@ -913,68 +923,41 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { /* * Ok, let's use hibernate search to build the expansion */ - QueryBuilder qb = em.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get(); - BooleanJunction bool = qb.bool(); + //Manually building a predicate since we need to throw it around. 
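+	//Rough shape of the HS6 predicate DSL used below: the factory comes from searchSession.scope(TermConcept.class).predicate(),
+	//clauses are combined via predicate.bool(b -> b.must(...)), and nothing executes until searchSession.search(...).where(...) runs further down.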
+ SearchPredicateFactory predicate = searchSession.scope(TermConcept.class).predicate(); - bool.must(qb.keyword().onField("myCodeSystemVersionPid").matching(csv.getPid()).createQuery()); + //Build the top-level expansion on filters. + PredicateFinalStep step = predicate.bool(b -> { + b.must(predicate.match().field("myCodeSystemVersionPid").matching(csv.getPid())); - if (theExpansionFilter.hasCode()) { - bool.must(qb.keyword().onField("myCode").matching(theExpansionFilter.getCode()).createQuery()); - } - - /* - * Filters - */ - String codeSystemUrlAndVersion; - if (includeOrExcludeVersion != null) { - codeSystemUrlAndVersion = theSystem + "|" + includeOrExcludeVersion; - } else { - codeSystemUrlAndVersion = theSystem; - } - for (ValueSet.ConceptSetFilterComponent nextFilter : theIncludeOrExclude.getFilter()) { - handleFilter(codeSystemUrlAndVersion, qb, bool, nextFilter); - } - for (ValueSet.ConceptSetFilterComponent nextFilter : theExpansionFilter.getFilters()) { - handleFilter(codeSystemUrlAndVersion, qb, bool, nextFilter); - } - - Query luceneQuery = bool.createQuery(); - - /* - * Include/Exclude Concepts - */ - List codes = theIncludeOrExclude - .getConcept() - .stream() - .filter(Objects::nonNull) - .map(ValueSet.ConceptReferenceComponent::getCode) - .filter(StringUtils::isNotBlank) - .map(t -> new Term("myCode", t)) - .collect(Collectors.toList()); - if (codes.size() > 0) { - - BooleanQuery.Builder builder = new BooleanQuery.Builder(); - builder.setMinimumNumberShouldMatch(1); - for (Term nextCode : codes) { - builder.add(new TermQuery(nextCode), BooleanClause.Occur.SHOULD); + if (theExpansionFilter.hasCode()) { + b.must(predicate.match().field("myCode").matching(theExpansionFilter.getCode())); } - luceneQuery = new BooleanQuery.Builder() - .add(luceneQuery, BooleanClause.Occur.MUST) - .add(builder.build(), BooleanClause.Occur.MUST) - .build(); - } + String codeSystemUrlAndVersion = buildCodeSystemUrlAndVersion(theSystem, includeOrExcludeVersion); + for (ValueSet.ConceptSetFilterComponent nextFilter : theIncludeOrExclude.getFilter()) { + handleFilter(codeSystemUrlAndVersion, predicate, b, nextFilter); + } + for (ValueSet.ConceptSetFilterComponent nextFilter : theExpansionFilter.getFilters()) { + handleFilter(codeSystemUrlAndVersion, predicate, b, nextFilter); + } + }); - /* - * Execute the query - */ - FullTextQuery jpaQuery = em.createFullTextQuery(luceneQuery, TermConcept.class); + PredicateFinalStep expansionStep = buildExpansionPredicate(theIncludeOrExclude, predicate); + final PredicateFinalStep finishedQuery; + if (expansionStep == null) { + finishedQuery = step; + } else { + finishedQuery = predicate.bool().must(step).must(expansionStep); + } /* * DM 2019-08-21 - Processing slows after any ValueSets with many codes explicitly identified. This might * be due to the dark arts that is memory management. Will monitor but not do anything about this right now. */ - BooleanQuery.setMaxClauseCount(SearchBuilder.getMaximumPageSize()); + //BooleanQuery.setMaxClauseCount(SearchBuilder.getMaximumPageSize()); + //TODO GGG HS looks like we can't set max clause count, but it can be set server side. 
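+	//For the Elasticsearch backend the server-side equivalent is the "indices.query.bool.max_clause_count" setting;
+	//for the Lucene backend, BooleanQuery.setMaxClauseCount could presumably still be called directly if it proves necessary.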
+ //BooleanQuery.setMaxClauseCount(10000); StopWatch sw = new StopWatch(); AtomicInteger count = new AtomicInteger(0); @@ -995,22 +978,27 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } } - jpaQuery.setMaxResults(maxResultsPerBatch); - jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch); +// jpaQuery.setMaxResults(maxResultsPerBatch); +// jpaQuery.setFirstResult(theQueryIndex * maxResultsPerBatch); ourLog.debug("Beginning batch expansion for {} with max results per batch: {}", (theAdd ? "inclusion" : "exclusion"), maxResultsPerBatch); StopWatch swForBatch = new StopWatch(); AtomicInteger countForBatch = new AtomicInteger(0); - List resultList = jpaQuery.getResultList(); - int resultsInBatch = resultList.size(); - int firstResult = jpaQuery.getFirstResult(); + SearchQuery termConceptsQuery = searchSession.search(TermConcept.class) + .where(f -> finishedQuery).toQuery(); + + ourLog.debug("About to execute query: {}", termConceptsQuery.queryString()); + List termConcepts = termConceptsQuery.fetchHits(theQueryIndex * maxResultsPerBatch, maxResultsPerBatch); + + + int resultsInBatch = termConcepts.size(); + int firstResult = theQueryIndex * maxResultsPerBatch;// TODO GGG HS we lose the ability to check the index of the first result, so just best-guessing it here. int delta = 0; - for (Object next : resultList) { + for (TermConcept concept: termConcepts) { count.incrementAndGet(); countForBatch.incrementAndGet(); - TermConcept concept = (TermConcept) next; boolean added = addCodeIfNotAlreadyAdded(theValueSetCodeAccumulator, theAddedCodes, concept, theAdd, includeOrExcludeVersion); if (added) { delta++; @@ -1028,6 +1016,46 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } } + /** + * Helper method which builds a predicate for the expansion + */ + private PredicateFinalStep buildExpansionPredicate(ValueSet.ConceptSetComponent theIncludeOrExclude, SearchPredicateFactory thePredicate) { + PredicateFinalStep expansionStep; + /* + * Include/Exclude Concepts + */ + List codes = theIncludeOrExclude + .getConcept() + .stream() + .filter(Objects::nonNull) + .map(ValueSet.ConceptReferenceComponent::getCode) + .filter(StringUtils::isNotBlank) + .map(t -> new Term("myCode", t)) + .collect(Collectors.toList()); + + if (codes.size() > 0) { + expansionStep = thePredicate.bool(b -> { + b.minimumShouldMatchNumber(1); + for (Term code : codes) { + b.should(thePredicate.match().field(code.field()).matching(code.text())); + } + }); + return expansionStep; + } else { + return null; + } + } + + private String buildCodeSystemUrlAndVersion(String theSystem, String theIncludeOrExcludeVersion) { + String codeSystemUrlAndVersion; + if (theIncludeOrExcludeVersion != null) { + codeSystemUrlAndVersion = theSystem + "|" + theIncludeOrExcludeVersion; + } else { + codeSystemUrlAndVersion = theSystem; + } + return codeSystemUrlAndVersion; + } + private @Nonnull ValueSetExpansionOptions provideExpansionOptions(@Nullable ValueSetExpansionOptions theExpansionOptions) { if (theExpansionOptions != null) { @@ -1046,7 +1074,7 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } } - private void handleFilter(String theCodeSystemIdentifier, QueryBuilder theQb, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilter(String theCodeSystemIdentifier, SearchPredicateFactory theF, BooleanPredicateClausesStep theB, ValueSet.ConceptSetFilterComponent theFilter) { if (isBlank(theFilter.getValue()) && theFilter.getOp() == null
&& isBlank(theFilter.getProperty())) { return; } @@ -1058,258 +1086,36 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { switch (theFilter.getProperty()) { case "display:exact": case "display": - handleFilterDisplay(theQb, theBool, theFilter); + handleFilterDisplay(theF, theB, theFilter); break; case "concept": case "code": - handleFilterConceptAndCode(theCodeSystemIdentifier, theQb, theBool, theFilter); + handleFilterConceptAndCode(theCodeSystemIdentifier, theF, theB, theFilter); break; case "parent": case "child": isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty()); - handleFilterLoincParentChild(theBool, theFilter); + handleFilterLoincParentChild(theF, theB, theFilter); break; case "ancestor": isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty()); - handleFilterLoincAncestor(theCodeSystemIdentifier, theBool, theFilter); + handleFilterLoincAncestor2(theCodeSystemIdentifier, theF, theB, theFilter); break; case "descendant": isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty()); - handleFilterLoincDescendant(theCodeSystemIdentifier, theBool, theFilter); + handleFilterLoincDescendant(theCodeSystemIdentifier, theF, theB, theFilter); break; case "copyright": isCodeSystemLoincOrThrowInvalidRequestException(theCodeSystemIdentifier, theFilter.getProperty()); - handleFilterLoincCopyright(theBool, theFilter); + handleFilterLoincCopyright(theF, theB, theFilter); break; default: - handleFilterRegex(theBool, theFilter); + handleFilterRegex(theF, theB, theFilter); break; } } - private void isCodeSystemLoincOrThrowInvalidRequestException(String theSystemIdentifier, String theProperty) { - String systemUrl = getUrlFromIdentifier(theSystemIdentifier); - if (!isCodeSystemLoinc(systemUrl)) { - throw new InvalidRequestException("Invalid filter, property " + theProperty + " is LOINC-specific and cannot be used with system: " + systemUrl); - } - } - - private boolean isCodeSystemLoinc(String theSystem) { - return ITermLoaderSvc.LOINC_URI.equals(theSystem); - } - - private void handleFilterDisplay(QueryBuilder theQb, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - if (theFilter.getProperty().equals("display:exact") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { - addDisplayFilterExact(theQb, theBool, theFilter); - } else if (theFilter.getProperty().equals("display") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { - if (theFilter.getValue().trim().contains(" ")) { - addDisplayFilterExact(theQb, theBool, theFilter); - } else { - addDisplayFilterInexact(theQb, theBool, theFilter); - } - } - } - - private void addDisplayFilterExact(QueryBuilder qb, BooleanJunction bool, ValueSet.ConceptSetFilterComponent nextFilter) { - bool.must(qb.phrase().onField("myDisplay").sentence(nextFilter.getValue()).createQuery()); - } - - private void addDisplayFilterInexact(QueryBuilder qb, BooleanJunction bool, ValueSet.ConceptSetFilterComponent nextFilter) { - Query textQuery = qb - .phrase() - .withSlop(2) - .onField("myDisplay").boostedTo(4.0f) - //.andField("myDisplayEdgeNGram").boostedTo(2.0f) - .andField("myDisplayWordEdgeNGram").boostedTo(1.0f) - // .andField("myDisplayPhonetic").boostedTo(0.5f) - .sentence(nextFilter.getValue().toLowerCase()).createQuery(); - bool.must(textQuery); - } - - private void handleFilterConceptAndCode(String theSystem, QueryBuilder theQb, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent 
theFilter) { - TermConcept code = findCode(theSystem, theFilter.getValue()) - .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theFilter.getValue())); - - if (theFilter.getOp() == ValueSet.FilterOperator.ISA) { - ourLog.debug(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay()); - theBool.must(theQb.keyword().onField("myParentPids").matching("" + code.getId()).createQuery()); - } else { - throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); - } - } - - @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - private void handleFilterLoincParentChild(BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - switch (theFilter.getOp()) { - case EQUAL: - addLoincFilterParentChildEqual(theBool, theFilter.getProperty(), theFilter.getValue()); - break; - case IN: - addLoincFilterParentChildIn(theBool, theFilter); - break; - default: - throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); - } - } - - private void addLoincFilterParentChildEqual(BooleanJunction theBool, String theProperty, String theValue) { - logFilteringValueOnProperty(theValue, theProperty); - theBool.must(new TermsQuery(getPropertyTerm(theProperty, theValue))); - } - - private void addLoincFilterParentChildIn(BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - String[] values = theFilter.getValue().split(","); - List terms = new ArrayList<>(); - for (String value : values) { - logFilteringValueOnProperty(value, theFilter.getProperty()); - terms.add(getPropertyTerm(theFilter.getProperty(), value)); - } - theBool.must(new TermsQuery(terms)); - } - - private Term getPropertyTerm(String theProperty, String theValue) { - return new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty, theValue); - } - - @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - private void handleFilterLoincAncestor(String theSystem, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - switch (theFilter.getOp()) { - case EQUAL: - addLoincFilterAncestorEqual(theSystem, theBool, theFilter); - break; - case IN: - addLoincFilterAncestorIn(theSystem, theBool, theFilter); - break; - default: - throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); - } - } - - private void addLoincFilterAncestorEqual(String theSystem, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - addLoincFilterAncestorEqual(theSystem, theBool, theFilter.getProperty(), theFilter.getValue()); - } - - private void addLoincFilterAncestorEqual(String theSystem, BooleanJunction theBool, String theProperty, String theValue) { - List terms = getAncestorTerms(theSystem, theProperty, theValue); - theBool.must(new TermsQuery(terms)); - } - - private void addLoincFilterAncestorIn(String theSystem, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - String[] values = theFilter.getValue().split(","); - List terms = new ArrayList<>(); - for (String value : values) { - terms.addAll(getAncestorTerms(theSystem, theFilter.getProperty(), value)); - } - theBool.must(new TermsQuery(terms)); - } - - private List getAncestorTerms(String theSystem, String theProperty, String theValue) { - 
List retVal = new ArrayList<>(); - - TermConcept code = findCode(theSystem, theValue) - .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); - - retVal.add(new Term("myParentPids", "" + code.getId())); - logFilteringValueOnProperty(theValue, theProperty); - - return retVal; - } - - @SuppressWarnings("EnumSwitchStatementWhichMissesCases") - private void handleFilterLoincDescendant(String theSystem, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - switch (theFilter.getOp()) { - case EQUAL: - addLoincFilterDescendantEqual(theSystem, theBool, theFilter); - break; - case IN: - addLoincFilterDescendantIn(theSystem, theBool, theFilter); - break; - default: - throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); - } - } - - private void addLoincFilterDescendantEqual(String theSystem, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - addLoincFilterDescendantEqual(theSystem, theBool, theFilter.getProperty(), theFilter.getValue()); - } - - private void addLoincFilterDescendantEqual(String theSystem, BooleanJunction theBool, String theProperty, String theValue) { - List terms = getDescendantTerms(theSystem, theProperty, theValue); - theBool.must(new TermsQuery(terms)); - } - - private void addLoincFilterDescendantIn(String theSystem, BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - String[] values = theFilter.getValue().split(","); - List terms = new ArrayList<>(); - for (String value : values) { - terms.addAll(getDescendantTerms(theSystem, theFilter.getProperty(), value)); - } - theBool.must(new TermsQuery(terms)); - } - - private List getDescendantTerms(String theSystem, String theProperty, String theValue) { - List retVal = new ArrayList<>(); - - TermConcept code = findCode(theSystem, theValue) - .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); - - String[] parentPids = code.getParentPidsAsString().split(" "); - for (String parentPid : parentPids) { - retVal.add(new Term("myId", parentPid)); - } - logFilteringValueOnProperty(theValue, theProperty); - - return retVal; - } - - private void handleFilterLoincCopyright(BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { - if (theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { - - String copyrightFilterValue = defaultString(theFilter.getValue()).toLowerCase(); - switch (copyrightFilterValue) { - case "3rdparty": - logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty()); - addFilterLoincCopyright3rdParty(theBool); - break; - case "loinc": - logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty()); - addFilterLoincCopyrightLoinc(theBool); - break; - default: - throwInvalidRequestForValueOnProperty(theFilter.getValue(), theFilter.getProperty()); - } - - } else { - throwInvalidRequestForOpOnProperty(theFilter.getOp(), theFilter.getProperty()); - } - } - - private void addFilterLoincCopyright3rdParty(BooleanJunction theBool) { - theBool.must(getRegexQueryForFilterLoincCopyright()); - } - - private void addFilterLoincCopyrightLoinc(BooleanJunction theBool) { - theBool.must(getRegexQueryForFilterLoincCopyright()).not(); - } - - private RegexpQuery getRegexQueryForFilterLoincCopyright() { - Term term = new 
Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE", ".*"); - return new RegexpQuery(term); - } - - private void logFilteringValueOnProperty(String theValue, String theProperty) { - ourLog.debug(" * Filtering with value={} on property {}", theValue, theProperty); - } - - private void throwInvalidRequestForOpOnProperty(ValueSet.FilterOperator theOp, String theProperty) { - throw new InvalidRequestException("Don't know how to handle op=" + theOp + " on property " + theProperty); - } - - private void throwInvalidRequestForValueOnProperty(String theValue, String theProperty) { - throw new InvalidRequestException("Don't know how to handle value=" + theValue + " on property " + theProperty); - } - - private void handleFilterRegex(BooleanJunction theBool, ValueSet.ConceptSetFilterComponent theFilter) { + private void handleFilterRegex(SearchPredicateFactory theF, BooleanPredicateClausesStep theB, ValueSet.ConceptSetFilterComponent theFilter) { if (theFilter.getOp() == ValueSet.FilterOperator.REGEX) { /* @@ -1330,19 +1136,274 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { value = value.substring(1); } - Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value); - RegexpQuery query = new RegexpQuery(term); - theBool.must(query); + Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value); + if (isFullTextSetToUseElastic()) { + String regexpQuery = "{'regexp':{'" + term.field() + "':{'value':'" + term.text() + "'}}}"; + ourLog.debug("Build Elasticsearch Regexp Query: {}", regexpQuery); + theB.must(theF.extension(ElasticsearchExtension.get()).fromJson(regexpQuery)); + } else { + RegexpQuery query = new RegexpQuery(term); + theB.must(theF.extension(LuceneExtension.get()).fromLuceneQuery(query)); + } } else { - String value = theFilter.getValue(); - Term term = new Term(TermConceptPropertyFieldBridge.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value); - theBool.must(new TermsQuery(term)); + Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theFilter.getProperty(), value); + theB.must(theF.match().field(term.field()).matching(term.text())); } } + private void handleFilterLoincCopyright(SearchPredicateFactory theF, BooleanPredicateClausesStep theB, ValueSet.ConceptSetFilterComponent theFilter) { + if (theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { + + String copyrightFilterValue = defaultString(theFilter.getValue()).toLowerCase(); + switch (copyrightFilterValue) { + case "3rdparty": + logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty()); + addFilterLoincCopyright3rdParty(theF, theB); + break; + case "loinc": + logFilteringValueOnProperty(theFilter.getValue(), theFilter.getProperty()); + addFilterLoincCopyrightLoinc(theF, theB); + break; + default: + throwInvalidRequestForValueOnProperty(theFilter.getValue(), theFilter.getProperty()); + } + + } else { + throwInvalidRequestForOpOnProperty(theFilter.getOp(), theFilter.getProperty()); + } + } + + private void addFilterLoincCopyrightLoinc(SearchPredicateFactory thePredicateFactory, BooleanPredicateClausesStep theBooleanClause) { + theBooleanClause.mustNot(thePredicateFactory.exists().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE")); + } + + private void addFilterLoincCopyright3rdParty(SearchPredicateFactory thePredicateFactory, BooleanPredicateClausesStep 
theBooleanClause) { + //TODO GGG HS These used to be Term term = new Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE", ".*");, which was lucene-specific. + //TODO GGG HS ask diederik if this is equivalent. + //This old .* regex is the same as an existence check on a field, which I've implemented here. + theBooleanClause.must(thePredicateFactory.exists().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + "EXTERNAL_COPYRIGHT_NOTICE")); + } + + private void handleFilterLoincAncestor2(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + switch (theFilter.getOp()) { + case EQUAL: + addLoincFilterAncestorEqual(theSystem, f, b, theFilter); + break; + case IN: + addLoincFilterAncestorIn(theSystem, f, b, theFilter); + break; + default: + throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); + } + + } + + private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + addLoincFilterAncestorEqual(theSystem, f, b, theFilter.getProperty(), theFilter.getValue()); + } + + private void addLoincFilterAncestorEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, String theProperty, String theValue) { + List terms = getAncestorTerms(theSystem, theProperty, theValue); + b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); + } + + private void addLoincFilterAncestorIn(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + String[] values = theFilter.getValue().split(","); + List terms = new ArrayList<>(); + for (String value : values) { + terms.addAll(getAncestorTerms(theSystem, theFilter.getProperty(), value)); + } + b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); + + } + + private void handleFilterLoincParentChild(SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + switch (theFilter.getOp()) { + case EQUAL: + addLoincFilterParentChildEqual(f, b, theFilter.getProperty(), theFilter.getValue()); + break; + case IN: + addLoincFilterParentChildIn(f, b, theFilter); + break; + default: + throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); + } + } + + private void addLoincFilterParentChildIn(SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + String[] values = theFilter.getValue().split(","); + List terms = new ArrayList<>(); + for (String value : values) { + logFilteringValueOnProperty(value, theFilter.getProperty()); + terms.add(getPropertyTerm(theFilter.getProperty(), value)); + } + + //TODO GGG HS: Not sure if this is the right equivalent...seems to be no equivalent to `TermsQuery` in HS6. + //Far as I'm aware, this is a single element of a MUST portion of a bool, which itself should contain a list of OR'ed options, e.g. 
+ // shape == round && color == (green || blue) + b.must(f.bool(innerB -> terms.forEach(term -> innerB.should(f.match().field(term.field()).matching(term.text()))))); + } + + private void addLoincFilterParentChildEqual(SearchPredicateFactory f, BooleanPredicateClausesStep b, String theProperty, String theValue) { + logFilteringValueOnProperty(theValue, theProperty); + //TODO GGG HS: Not sure if this is the right equivalent...seems to be no equivalent to `TermsQuery` in HS6. + //b.must(new TermsQuery(getPropertyTerm(theProperty, theValue))); + //According to the DSL migration reference (https://docs.jboss.org/hibernate/search/6.0/migration/html_single/#queries-reference), + //Since this property is handled with a specific analyzer, I'm not sure a terms match here is actually correct. The analyzer is literally just a whitespace tokenizer here. + + b.must(f.match().field(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty).matching(theValue)); + } + + private void handleFilterConceptAndCode(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + + + TermConcept code = findCode(theSystem, theFilter.getValue()) + .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theFilter.getValue())); + + if (theFilter.getOp() == ValueSet.FilterOperator.ISA) { + ourLog.debug(" * Filtering on codes with a parent of {}/{}/{}", code.getId(), code.getCode(), code.getDisplay()); + + b.must(f.match().field("myParentPids").matching("" + code.getId())); + } else { + throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); + } + } + + + private void isCodeSystemLoincOrThrowInvalidRequestException(String theSystemIdentifier, String theProperty) { + String systemUrl = getUrlFromIdentifier(theSystemIdentifier); + if (!isCodeSystemLoinc(systemUrl)) { + throw new InvalidRequestException("Invalid filter, property " + theProperty + " is LOINC-specific and cannot be used with system: " + systemUrl); + } + } + + private boolean isCodeSystemLoinc(String theSystem) { + return ITermLoaderSvc.LOINC_URI.equals(theSystem); + } + + private void handleFilterDisplay(SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + if (theFilter.getProperty().equals("display:exact") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { + addDisplayFilterExact(f, b, theFilter); + } else if (theFilter.getProperty().equals("display") && theFilter.getOp() == ValueSet.FilterOperator.EQUAL) { + if (theFilter.getValue().trim().contains(" ")) { + addDisplayFilterExact(f, b, theFilter); + } else { + addDisplayFilterInexact(f, b, theFilter); + } + } + } + + private void addDisplayFilterExact(SearchPredicateFactory f, BooleanPredicateClausesStep bool, ValueSet.ConceptSetFilterComponent nextFilter) { + bool.must(f.phrase().field("myDisplay").matching(nextFilter.getValue())); + } + + + + private void addDisplayFilterInexact(SearchPredicateFactory f, BooleanPredicateClausesStep bool, ValueSet.ConceptSetFilterComponent nextFilter) { + bool.must(f.phrase() + .field("myDisplay").boost(4.0f) + .field("myDisplayWordEdgeNGram").boost(1.0f) + .field("myDisplayEdgeNGram").boost(1.0f) + .matching(nextFilter.getValue().toLowerCase()) + .slop(2) + ); + } + + private Term getPropertyTerm(String theProperty, String theValue) { + return new 
Term(TermConceptPropertyBinder.CONCEPT_FIELD_PROPERTY_PREFIX + theProperty, theValue); + } + + private List getAncestorTerms(String theSystem, String theProperty, String theValue) { + List retVal = new ArrayList<>(); + + TermConcept code = findCode(theSystem, theValue) + .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); + + retVal.add(new Term("myParentPids", "" + code.getId())); + logFilteringValueOnProperty(theValue, theProperty); + + return retVal; + } + + @SuppressWarnings("EnumSwitchStatementWhichMissesCases") + private void handleFilterLoincDescendant(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + switch (theFilter.getOp()) { + case EQUAL: + addLoincFilterDescendantEqual(theSystem, f, b, theFilter); + break; + case IN: + addLoincFilterDescendantIn(theSystem, f,b , theFilter); + break; + default: + throw new InvalidRequestException("Don't know how to handle op=" + theFilter.getOp() + " on property " + theFilter.getProperty()); + } + } + + + private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + addLoincFilterDescendantEqual(theSystem, f, b, theFilter.getProperty(), theFilter.getValue()); + } + + private void addLoincFilterDescendantIn(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, ValueSet.ConceptSetFilterComponent theFilter) { + String[] values = theFilter.getValue().split(","); + List terms = new ArrayList<>(); + for (String value : values) { + terms.addAll(getDescendantTerms(theSystem, theFilter.getProperty(), value)); + } + searchByParentPids(f, b, terms); + } + + private void addLoincFilterDescendantEqual(String theSystem, SearchPredicateFactory f, BooleanPredicateClausesStep b, String theProperty, String theValue) { + List terms = getDescendantTerms(theSystem, theProperty, theValue); + searchByParentPids(f, b, terms); + } + + private void searchByParentPids(SearchPredicateFactory f, BooleanPredicateClausesStep b, List theTerms) { + List parentPids = convertTermsToParentPids(theTerms); + b.must(f.bool(innerB -> { + parentPids.forEach(pid -> innerB.should(f.match().field(theTerms.get(0).field()).matching(pid))); + })); + } + + private List convertTermsToParentPids(List theTerms) { + return theTerms.stream().map(Term::text).map(Long::valueOf).collect(Collectors.toList()); + } + + + private List getDescendantTerms(String theSystem, String theProperty, String theValue) { + List retVal = new ArrayList<>(); + + TermConcept code = findCode(theSystem, theValue) + .orElseThrow(() -> new InvalidRequestException("Invalid filter criteria - code does not exist: {" + Constants.codeSystemWithDefaultDescription(theSystem) + "}" + theValue)); + + String[] parentPids = code.getParentPidsAsString().split(" "); + for (String parentPid : parentPids) { + if (!StringUtils.equals(parentPid, "NONE")) { + retVal.add(new Term("myId", parentPid)); + } + } + logFilteringValueOnProperty(theValue, theProperty); + + return retVal; + } + + + + private void logFilteringValueOnProperty(String theValue, String theProperty) { + ourLog.debug(" * Filtering with value={} on property {}", theValue, theProperty); + } + + private void throwInvalidRequestForOpOnProperty(ValueSet.FilterOperator theOp, String theProperty) { + throw new InvalidRequestException("Don't know how to handle 
op=" + theOp + " on property " + theProperty); + } + + private void throwInvalidRequestForValueOnProperty(String theValue, String theProperty) { + throw new InvalidRequestException("Don't know how to handle value=" + theValue + " on property " + theProperty); + } + private void expandWithoutHibernateSearch(IValueSetConceptAccumulator theValueSetCodeAccumulator, TermCodeSystemVersion theVersion, Set theAddedCodes, ValueSet.ConceptSetComponent theInclude, String theSystem, boolean theAdd) { ourLog.trace("Hibernate search is not enabled"); @@ -1665,7 +1726,6 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } public void scheduleJob() { - // TODO KHS what does this mean? // Register scheduled job to pre-expand ValueSets // In the future it would be great to make this a cluster-aware task somehow ScheduledJobDefinition vsJobDefinition = new ScheduledJobDefinition(); @@ -2054,12 +2114,12 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { TermConcept codeB = findCode(codeBSystemIdentifier, conceptB.getCode()) .orElseThrow(() -> new InvalidRequestException("Unknown code: " + conceptB)); - FullTextEntityManager em = org.hibernate.search.jpa.Search.getFullTextEntityManager(myEntityManager); + SearchSession searchSession = Search.session(myEntityManager); ConceptSubsumptionOutcome subsumes; - subsumes = testForSubsumption(em, codeA, codeB, ConceptSubsumptionOutcome.SUBSUMES); + subsumes = testForSubsumption(searchSession, codeA, codeB, ConceptSubsumptionOutcome.SUBSUMES); if (subsumes == null) { - subsumes = testForSubsumption(em, codeB, codeA, ConceptSubsumptionOutcome.SUBSUMEDBY); + subsumes = testForSubsumption(searchSession, codeB, codeA, ConceptSubsumptionOutcome.SUBSUMEDBY); } if (subsumes == null) { subsumes = ConceptSubsumptionOutcome.NOTSUBSUMED; @@ -2116,20 +2176,21 @@ public abstract class BaseTermReadSvcImpl implements ITermReadSvc { } @Nullable - private ConceptSubsumptionOutcome testForSubsumption(FullTextEntityManager theEntityManager, TermConcept theLeft, TermConcept theRight, ConceptSubsumptionOutcome theOutput) { - QueryBuilder qb = theEntityManager.getSearchFactory().buildQueryBuilder().forEntity(TermConcept.class).get(); - BooleanJunction bool = qb.bool(); - bool.must(qb.keyword().onField("myId").matching(Long.toString(theRight.getId())).createQuery()); - bool.must(qb.keyword().onField("myParentPids").matching(Long.toString(theLeft.getId())).createQuery()); - Query luceneQuery = bool.createQuery(); - FullTextQuery jpaQuery = theEntityManager.createFullTextQuery(luceneQuery, TermConcept.class); - jpaQuery.setMaxResults(1); - if (jpaQuery.getResultList().size() > 0) { + private ConceptSubsumptionOutcome testForSubsumption(SearchSession theSearchSession, TermConcept theLeft, TermConcept theRight, ConceptSubsumptionOutcome theOutput) { + List fetch = theSearchSession.search(TermConcept.class) + .where(f -> f.bool() + .must(f.match().field("myId").matching(theRight.getId())) + .must(f.match().field("myParentPids").matching(Long.toString(theLeft.getId()))) + ).fetchHits(1); + + if (fetch.size() > 0) { return theOutput; + } else { + return null; } - return null; } + private ArrayList toVersionIndependentConcepts(String theSystem, Set codes) { ArrayList retVal = new ArrayList<>(codes.size()); for (TermConcept next : codes) { diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java index 
f897f50130a..ca03adf93bb 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermCodeSystemStorageSvcImpl.java @@ -25,7 +25,6 @@ import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; -import ca.uhn.fhir.jpa.dao.IHapiJpaRepository; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; @@ -52,7 +51,6 @@ import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import ca.uhn.fhir.util.ObjectUtil; -import ca.uhn.fhir.util.StopWatch; import ca.uhn.fhir.util.ValidateUtil; import org.apache.commons.lang3.Validate; import org.hl7.fhir.instance.model.api.IIdType; @@ -62,19 +60,17 @@ import org.hl7.fhir.r4.model.ValueSet; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Slice; import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; -import org.springframework.transaction.support.TransactionTemplate; +import org.springframework.transaction.support.TransactionSynchronizationManager; import javax.annotation.Nonnull; import javax.persistence.EntityManager; import javax.persistence.PersistenceContext; import javax.persistence.PersistenceContextType; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Date; @@ -83,13 +79,16 @@ import java.util.HashSet; import java.util.IdentityHashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Optional; import java.util.Set; -import java.util.concurrent.TimeUnit; +import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Supplier; import java.util.stream.Collectors; +import static org.apache.commons.lang3.StringUtils.defaultIfBlank; +import static org.apache.commons.lang3.StringUtils.defaultString; +import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { @@ -183,57 +182,116 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { if (cs == null) { throw new InvalidRequestException("Unknown code system: " + theSystem); } + IIdType target = cs.getResource().getIdDt(); AtomicInteger removeCounter = new AtomicInteger(0); - for (TermConcept nextSuppliedConcept : theValue.getRootConcepts()) { - Optional conceptOpt = myTerminologySvc.findCode(theSystem, nextSuppliedConcept.getCode()); - if (conceptOpt.isPresent()) { - TermConcept concept = conceptOpt.get(); - deleteConceptChildrenAndConcept(concept, removeCounter); - } + //We need to delete all termconcepts, and their children. 
This stream flattens the TermConcepts and their + //children into a single set of TermConcept objects retrieved from the DB. Note that we have to do this because + //deleteById() in JPA doesnt appear to actually commit or flush a transaction until way later, and we end up + //iterating multiple times over the same elements, which screws up our counter. + + + //Grab the actual entities + List collect = theValue.getRootConcepts().stream() + .map(val -> myTerminologySvc.findCode(theSystem, val.getCode())) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toList()); + + //Iterate over the actual entities and fill out their children + Set allFoundTermConcepts = collect + .stream() + .flatMap(concept -> flattenChildren(concept).stream()) + .map(suppliedTermConcept -> myTerminologySvc.findCode(theSystem, suppliedTermConcept.getCode())) + .filter(Optional::isPresent) + .map(Optional::get) + .collect(Collectors.toSet()); + + //Delete everything about these codes. + for (TermConcept code : allFoundTermConcepts) { + deleteEverythingRelatedToConcept(code, removeCounter); } - IIdType target = cs.getResource().getIdDt(); return new UploadStatistics(removeCounter.get(), target); } + private void deleteEverythingRelatedToConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) { + + for (TermConceptParentChildLink nextParent : theConcept.getParents()) { + nextParent.getParent().getChildren().remove(nextParent); + myConceptParentChildLinkDao.deleteById(nextParent.getId()); + } + for (TermConceptParentChildLink nextChild : theConcept.getChildren()) { + nextChild.getChild().getParents().remove(nextChild); + myConceptParentChildLinkDao.deleteById(nextChild.getId()); + } + + for (TermConceptDesignation next : theConcept.getDesignations()) { + myConceptDesignationDao.deleteById(next.getPid()); + } + theConcept.getDesignations().clear(); + for (TermConceptProperty next : theConcept.getProperties()) { + myConceptPropertyDao.deleteById(next.getPid()); + } + theConcept.getProperties().clear(); + + ourLog.info("Deleting concept {} - Code {}", theConcept.getId(), theConcept.getCode()); + + myConceptDao.deleteById(theConcept.getId()); +// myEntityManager.remove(theConcept); + + theRemoveCounter.incrementAndGet(); + } + + private List flattenChildren(TermConcept theTermConcept) { + if (theTermConcept.getChildren().isEmpty()) { + return Arrays.asList(theTermConcept); + } + + //Recursively flatten children + List childTermConcepts = theTermConcept.getChildren().stream() + .map(TermConceptParentChildLink::getChild) + .flatMap(childConcept -> flattenChildren(childConcept).stream()) + .collect(Collectors.toList()); + + //Add itself before its list of children + childTermConcepts.add(0, theTermConcept); + return childTermConcepts; + } + @Override - @Transactional(propagation = Propagation.NEVER) + @Transactional public void deleteCodeSystem(TermCodeSystem theCodeSystem) { + assert TransactionSynchronizationManager.isActualTransactionActive(); + ourLog.info(" * Deleting code system {}", theCodeSystem.getPid()); - TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); - txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED); - txTemplate.executeWithoutResult(t -> { - myEntityManager.flush(); - TermCodeSystem cs = myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new); - cs.setCurrentVersion(null); - myCodeSystemDao.save(cs); - myCodeSystemDao.flush(); - }); + myEntityManager.flush(); + TermCodeSystem cs = 
myCodeSystemDao.findById(theCodeSystem.getPid()).orElseThrow(IllegalStateException::new); + cs.setCurrentVersion(null); + myCodeSystemDao.save(cs); + myCodeSystemDao.flush(); - List codeSystemVersionPids = txTemplate.execute(t -> { - List codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid()); - return codeSystemVersions - .stream() - .map(v -> v.getPid()) - .collect(Collectors.toList()); - }); + List codeSystemVersions = myCodeSystemVersionDao.findByCodeSystemPid(theCodeSystem.getPid()); + List codeSystemVersionPids = codeSystemVersions + .stream() + .map(TermCodeSystemVersion::getPid) + .collect(Collectors.toList()); for (Long next : codeSystemVersionPids) { deleteCodeSystemVersion(next); } - txTemplate.executeWithoutResult(t -> { - myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem); - myCodeSystemDao.delete(theCodeSystem); - myEntityManager.flush(); - }); + myCodeSystemVersionDao.deleteForCodeSystem(theCodeSystem); + myCodeSystemDao.delete(theCodeSystem); + myEntityManager.flush(); } @Override @Transactional(propagation = Propagation.NEVER) public void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) { + assert !TransactionSynchronizationManager.isActualTransactionActive(); + // Delete TermCodeSystemVersion ourLog.info(" * Deleting TermCodeSystemVersion {}", theCodeSystemVersion.getCodeSystemVersionId()); deleteCodeSystemVersion(theCodeSystemVersion.getPid()); @@ -322,11 +380,15 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } @Override - @Transactional(propagation = Propagation.REQUIRED) + @Transactional public IIdType storeNewCodeSystemVersion(CodeSystem theCodeSystemResource, TermCodeSystemVersion theCodeSystemVersion, RequestDetails theRequest, List theValueSets, List theConceptMaps) { + assert TransactionSynchronizationManager.isActualTransactionActive(); + Validate.notBlank(theCodeSystemResource.getUrl(), "theCodeSystemResource must have a URL"); + // Note that this creates the TermCodeSystem and TermCodeSystemVersion entities if needed IIdType csId = myTerminologyVersionAdapterSvc.createOrUpdateCodeSystem(theCodeSystemResource); + ResourcePersistentId codeSystemResourcePid = myIdHelperService.resolveResourcePersistentIds(RequestPartitionId.allPartitions(), csId.getResourceType(), csId.getIdPart()); ResourceTable resource = myResourceTableDao.getOne(codeSystemResourcePid.getIdAsLong()); @@ -343,35 +405,52 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } @Override - @Transactional(propagation = Propagation.REQUIRED) - public void storeNewCodeSystemVersion(ResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) { + @Transactional + public void storeNewCodeSystemVersion(ResourcePersistentId theCodeSystemResourcePid, String theSystemUri, String theSystemName, String theCodeSystemVersionId, TermCodeSystemVersion theCodeSystemVersion, ResourceTable theCodeSystemResourceTable) { + assert TransactionSynchronizationManager.isActualTransactionActive(); + ourLog.debug("Storing code system"); - ValidateUtil.isTrueOrThrowInvalidRequest(theCodeSystemVersion.getResource() != null, "No resource supplied"); + TermCodeSystemVersion codeSystemToStore = theCodeSystemVersion; + ValidateUtil.isTrueOrThrowInvalidRequest(codeSystemToStore.getResource() != null, "No resource supplied"); 
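These hunks also tighten the transaction contract: methods that previously built their own TransactionTemplate now run declaratively, and each one asserts which side of the transaction boundary it expects to be on. A compact sketch of that convention follows; the class and method names are illustrative only and are not taken from this change set.

// Illustrative sketch, not a hunk from this patch: the transaction convention adopted above.
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionSynchronizationManager;

public class TransactionBoundaryConvention {

	// Joins the caller's transaction; the assert turns the implicit contract into a loud
	// failure in tests if a caller forgets to open one.
	@Transactional
	public void runsInsideCallersTransaction() {
		assert TransactionSynchronizationManager.isActualTransactionActive();
		// ... writes that must commit together with the caller ...
	}

	// Must be invoked with no transaction open, leaving the implementation free to run its
	// own short transactions instead of accumulating everything into one huge commit.
	@Transactional(propagation = Propagation.NEVER)
	public void runsOutsideAnyTransaction() {
		assert !TransactionSynchronizationManager.isActualTransactionActive();
		// ... paged deletes, each page in its own small transaction ...
	}
}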
ValidateUtil.isNotBlankOrThrowInvalidRequest(theSystemUri, "No system URI supplied"); - // Grab the existing version so we can delete it + TermCodeSystem codeSystem = getOrCreateDistinctTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theCodeSystemVersionId, theCodeSystemResourceTable); + List existing = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemResourcePid.getIdAsLong()); - - /* - * Delete version being replaced. - */ - for (TermCodeSystemVersion next : existing) { - ourLog.info("Deleting old code system version {}", next.getPid()); - Long codeSystemVersionPid = next.getPid(); - deleteCodeSystemVersion(codeSystemVersionPid); + if (Objects.equals(next.getCodeSystemVersionId(), theCodeSystemVersionId) && myConceptDao.countByCodeSystemVersion(next.getPid()) == 0) { + + /* + * If we already have a CodeSystemVersion that matches the version we're storing, we + * can reuse it. + */ + next.setCodeSystemDisplayName(theSystemName); + codeSystemToStore = next; + + } else { + + /* + * If we already have a TermCodeSystemVersion that corresponds to the FHIR Resource ID we're + * adding a version to, we will mark it for deletion. For any one resource there can only + * be one TermCodeSystemVersion entity in the DB. Multiple versions of a codesystem uses + * multiple CodeSystem resources with CodeSystem.version set differently (as opposed to + * multiple versions of the same CodeSystem, where CodeSystem.meta.versionId is different) + */ + next.setCodeSystemVersionId("DELETED_" + UUID.randomUUID().toString()); + myCodeSystemVersionDao.saveAndFlush(next); + myDeferredStorageSvc.deleteCodeSystemVersion(next); + + } } /* * Do the upload */ - TermCodeSystem codeSystem = getOrCreateDistinctTermCodeSystem(theCodeSystemResourcePid, theSystemUri, theSystemName, theSystemVersionId, theCodeSystemResourceTable); - theCodeSystemVersion.setCodeSystem(codeSystem); - - theCodeSystemVersion.setCodeSystemDisplayName(theSystemName); - theCodeSystemVersion.setCodeSystemVersionId(theSystemVersionId); + codeSystemToStore.setCodeSystem(codeSystem); + codeSystemToStore.setCodeSystemDisplayName(theSystemName); + codeSystemToStore.setCodeSystemVersionId(theCodeSystemVersionId); ourLog.debug("Validating all codes in CodeSystem for storage (this can take some time for large sets)"); @@ -379,40 +458,42 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { ArrayList conceptsStack = new ArrayList<>(); IdentityHashMap allConcepts = new IdentityHashMap<>(); int totalCodeCount = 0; - for (TermConcept next : theCodeSystemVersion.getConcepts()) { - totalCodeCount += validateConceptForStorage(next, theCodeSystemVersion, conceptsStack, allConcepts); + Collection conceptsToSave = theCodeSystemVersion.getConcepts(); + for (TermConcept next : conceptsToSave) { + totalCodeCount += validateConceptForStorage(next, codeSystemToStore, conceptsStack, allConcepts); } ourLog.debug("Saving version containing {} concepts", totalCodeCount); - - TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.saveAndFlush(theCodeSystemVersion); + if (codeSystemToStore.getPid() == null) { + codeSystemToStore = myCodeSystemVersionDao.saveAndFlush(codeSystemToStore); + } ourLog.debug("Saving code system"); - codeSystem.setCurrentVersion(theCodeSystemVersion); - codeSystem = myCodeSystemDao.saveAndFlush(codeSystem); + codeSystem.setCurrentVersion(codeSystemToStore); + if (codeSystem.getPid() == null) { + codeSystem = myCodeSystemDao.saveAndFlush(codeSystem); + } ourLog.debug("Setting 
CodeSystemVersion[{}] on {} concepts...", codeSystem.getPid(), totalCodeCount); - for (TermConcept next : theCodeSystemVersion.getConcepts()) { - populateVersion(next, codeSystemVersion); + for (TermConcept next : conceptsToSave) { + populateVersion(next, codeSystemToStore); } ourLog.debug("Saving {} concepts...", totalCodeCount); IdentityHashMap conceptsStack2 = new IdentityHashMap<>(); - for (TermConcept next : theCodeSystemVersion.getConcepts()) { - persistChildren(next, codeSystemVersion, conceptsStack2, totalCodeCount); + for (TermConcept next : conceptsToSave) { + persistChildren(next, codeSystemToStore, conceptsStack2, totalCodeCount); } ourLog.debug("Done saving concepts, flushing to database"); - myConceptDao.flush(); - myConceptParentChildLinkDao.flush(); - if (myDeferredStorageSvc.isStorageQueueEmpty() == false) { ourLog.info("Note that some concept saving has been deferred"); } + } private TermCodeSystemVersion getExistingTermCodeSystemVersion(Long theCodeSystemVersionPid, String theCodeSystemVersion) { @@ -427,60 +508,24 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } private void deleteCodeSystemVersion(final Long theCodeSystemVersionPid) { - ourLog.info(" * Deleting code system version {}", theCodeSystemVersionPid); + assert TransactionSynchronizationManager.isActualTransactionActive(); + ourLog.info(" * Marking code system version {} for deletion", theCodeSystemVersionPid); - PageRequest page1000 = PageRequest.of(0, 1000); - - // Parent/Child links - { - String descriptor = "parent/child links"; - Supplier> loader = () -> myConceptParentChildLinkDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid); - Supplier counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid); - doDelete(descriptor, loader, counter, myConceptParentChildLinkDao); - } - - // Properties - { - String descriptor = "concept properties"; - Supplier> loader = () -> myConceptPropertyDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid); - Supplier counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid); - doDelete(descriptor, loader, counter, myConceptPropertyDao); - } - - // Designations - { - String descriptor = "concept designations"; - Supplier> loader = () -> myConceptDesignationDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid); - Supplier counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid); - doDelete(descriptor, loader, counter, myConceptDesignationDao); - } - - // Concepts - { - String descriptor = "concepts"; - // For some reason, concepts are much slower to delete, so use a smaller batch size - PageRequest page100 = PageRequest.of(0, 100); - Supplier> loader = () -> myConceptDao.findIdsByCodeSystemVersion(page100, theCodeSystemVersionPid); - Supplier counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid); - doDelete(descriptor, loader, counter, myConceptDao); - } - - TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); - txTemplate.executeWithoutResult(tx -> { - Optional codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid); - if (codeSystemOpt.isPresent()) { - TermCodeSystem codeSystem = codeSystemOpt.get(); + Optional codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid); + if (codeSystemOpt.isPresent()) { + TermCodeSystem codeSystem = codeSystemOpt.get(); + if 
(codeSystem.getCurrentVersion() != null && codeSystem.getCurrentVersion().getPid().equals(theCodeSystemVersionPid)) { ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid()); codeSystem.setCurrentVersion(null); myCodeSystemDao.save(codeSystem); - myCodeSystemDao.flush(); } + } - ourLog.info(" * Deleting code system version"); - myCodeSystemVersionDao.delete(theCodeSystemVersionPid); - myCodeSystemVersionDao.flush(); - }); + TermCodeSystemVersion codeSystemVersion = myCodeSystemVersionDao.findById(theCodeSystemVersionPid).orElseThrow(() -> new IllegalStateException()); + codeSystemVersion.setCodeSystemVersionId("DELETED_" + UUID.randomUUID().toString()); + myCodeSystemVersionDao.save(codeSystemVersion); + myDeferredStorageSvc.deleteCodeSystemVersion(codeSystemVersion); } private void validateDstu3OrNewer() { @@ -635,13 +680,12 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } private void populateVersion(TermConcept theNext, TermCodeSystemVersion theCodeSystemVersion) { - if (theNext.getCodeSystemVersion() != null) { - return; - } theNext.setCodeSystemVersion(theCodeSystemVersion); for (TermConceptParentChildLink next : theNext.getChildren()) { populateVersion(next.getChild(), theCodeSystemVersion); } + theNext.getProperties().forEach(t->t.setCodeSystemVersion(theCodeSystemVersion)); + theNext.getDesignations().forEach(t->t.setCodeSystemVersion(theCodeSystemVersion)); } private void saveConceptLink(TermConceptParentChildLink next) { @@ -702,7 +746,7 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { // Check if a TermCodeSystemVersion entity already exists for this TermCodeSystem and version. codeSystemVersionEntity = myCodeSystemVersionDao.findByCodeSystemPidAndVersion(theCodeSystem.getPid(), theSystemVersionId); if (codeSystemVersionEntity != null) { - msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "cannotCreateDuplicateCodeSystemUrlAndVersion", theSystemUri, theSystemVersionId, codeSystemVersionEntity.getResource().getIdDt().toUnqualifiedVersionless().getValue()); + msg = myContext.getLocalizer().getMessage(BaseTermReadSvcImpl.class, "cannotCreateDuplicateCodeSystemUrlAndVersion", theSystemUri, theSystemVersionId, codeSystemVersionEntity.getResource().getIdDt().toUnqualifiedVersionless().getValue()); } } // Throw exception if the TermCodeSystemVersion is being duplicated. 
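The hunks above replace synchronous cascade deletes with a rename-and-defer scheme: an obsolete TermCodeSystemVersion only gets a "DELETED_" + UUID version id and is handed to the deferred storage service, and the expensive row deletes happen later in small, separate transactions (roughly the paged doDelete() loop that TermDeferredStorageSvcImpl gains further down in this patch). A minimal self-contained sketch of that paged loop, assuming Spring Data repositories keyed by Long PIDs; the class and method names below are illustrative, not part of the change set.

// Illustrative sketch, not a hunk from this patch.
import java.util.function.Supplier;

import org.springframework.data.domain.Slice;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.support.TransactionTemplate;

public class BatchedDeleteSketch {

	private final PlatformTransactionManager myTxManager;

	public BatchedDeleteSketch(PlatformTransactionManager theTxManager) {
		myTxManager = theTxManager;
	}

	// Deletes every PID the loader returns, one page per transaction, so a very large
	// terminology never has to fit into a single commit.
	<T> void deleteInPages(Supplier<Slice<Long>> thePidLoader, JpaRepository<T, Long> theDao) {
		TransactionTemplate tx = new TransactionTemplate(myTxManager);
		while (true) {
			Slice<Long> pids = tx.execute(t -> thePidLoader.get());
			if (pids == null || !pids.hasContent()) {
				break; // nothing left to delete for this code system version
			}
			tx.executeWithoutResult(t -> pids.forEach(theDao::deleteById));
		}
	}
}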
@@ -719,58 +763,13 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { theCodeSystemVersion.setCodeSystemVersionId(theCodeSystemResource.getVersion()); } - private void deleteConceptChildrenAndConcept(TermConcept theConcept, AtomicInteger theRemoveCounter) { - for (TermConceptParentChildLink nextChildLink : theConcept.getChildren()) { - deleteConceptChildrenAndConcept(nextChildLink.getChild(), theRemoveCounter); - } - myConceptParentChildLinkDao.deleteByConceptPid(theConcept.getId()); - - myConceptDesignationDao.deleteAll(theConcept.getDesignations()); - myConceptPropertyDao.deleteAll(theConcept.getProperties()); - - ourLog.info("Deleting concept {} - Code {}", theConcept.getId(), theConcept.getCode()); - myConceptDao.deleteByPid(theConcept.getId()); - theRemoveCounter.incrementAndGet(); - } - - - @SuppressWarnings("ConstantConditions") - private void doDelete(String theDescriptor, Supplier> theLoader, Supplier theCounter, IHapiJpaRepository theDao) { - TransactionTemplate txTemplate = new TransactionTemplate(myTransactionManager); - txTemplate.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRED); - - int count; - ourLog.info(" * Deleting {}", theDescriptor); - int totalCount = txTemplate.execute(t -> theCounter.get()); - StopWatch sw = new StopWatch(); - count = 0; - while (true) { - Slice link = txTemplate.execute(t -> theLoader.get()); - if (!link.hasContent()) { - break; - } - - txTemplate.execute(t -> { - link.forEach(id -> theDao.deleteByPid(id)); - theDao.flush(); - return null; - }); - - count += link.getNumberOfElements(); - ourLog.info(" * {} {} deleted ({}/{}) remaining - {}/sec - ETA: {}", count, theDescriptor, count, totalCount, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount)); - - } - - } - - - private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystem, ArrayList theConceptsStack, + private int validateConceptForStorage(TermConcept theConcept, TermCodeSystemVersion theCodeSystemVersion, ArrayList theConceptsStack, IdentityHashMap theAllConcepts) { ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() != null, "CodeSystemVersion is null"); - ValidateUtil.isTrueOrThrowInvalidRequest(theConcept.getCodeSystemVersion() == theCodeSystem, "CodeSystems are not equal"); ValidateUtil.isNotBlankOrThrowInvalidRequest(theConcept.getCode(), "CodeSystem contains a code with no code value"); + theConcept.setCodeSystemVersion(theCodeSystemVersion); if (theConceptsStack.contains(theConcept.getCode())) { throw new InvalidRequestException("CodeSystem contains circular reference around code " + theConcept.getCode()); } @@ -785,8 +784,8 @@ public class TermCodeSystemStorageSvcImpl implements ITermCodeSystemStorageSvc { } for (TermConceptParentChildLink next : theConcept.getChildren()) { - next.setCodeSystem(theCodeSystem); - retVal += validateConceptForStorage(next.getChild(), theCodeSystem, theConceptsStack, theAllConcepts); + next.setCodeSystem(theCodeSystemVersion); + retVal += validateConceptForStorage(next.getChild(), theCodeSystemVersion, theConceptsStack, theAllConcepts); } theConceptsStack.remove(theConceptsStack.size() - 1); diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java index 2475229eb12..1b9f9305109 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/TermDeferredStorageSvcImpl.java @@ -23,7 +23,9 @@ package ca.uhn.fhir.jpa.term; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDao; +import ca.uhn.fhir.jpa.dao.data.ITermConceptDesignationDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptParentChildLinkDao; +import ca.uhn.fhir.jpa.dao.data.ITermConceptPropertyDao; import ca.uhn.fhir.jpa.entity.TermCodeSystem; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; @@ -44,31 +46,27 @@ import org.quartz.JobExecutionContext; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.data.domain.PageRequest; +import org.springframework.data.domain.Slice; +import org.springframework.data.jpa.repository.JpaRepository; import org.springframework.transaction.PlatformTransactionManager; -import org.springframework.transaction.TransactionDefinition; import org.springframework.transaction.annotation.Propagation; import org.springframework.transaction.annotation.Transactional; +import org.springframework.transaction.support.TransactionSynchronizationManager; import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.PostConstruct; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { private static final Logger ourLog = LoggerFactory.getLogger(TermDeferredStorageSvcImpl.class); - @Autowired - protected ITermConceptDao myConceptDao; - @Autowired - protected ITermCodeSystemDao myCodeSystemDao; - @Autowired - protected ITermCodeSystemVersionDao myCodeSystemVersionDao; - @Autowired - protected PlatformTransactionManager myTransactionMgr; - private boolean myProcessDeferred = true; final private List myDeferredCodeSystemsDeletions = Collections.synchronizedList(new ArrayList<>()); final private List myDeferredCodeSystemVersionsDeletions = Collections.synchronizedList(new ArrayList<>()); final private List myDeferredConcepts = Collections.synchronizedList(new ArrayList<>()); @@ -76,6 +74,19 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { final private List myDeferredConceptMaps = Collections.synchronizedList(new ArrayList<>()); final private List myConceptLinksToSaveLater = Collections.synchronizedList(new ArrayList<>()); @Autowired + protected ITermConceptDao myConceptDao; + @Autowired + protected ITermCodeSystemDao myCodeSystemDao; + @Autowired + protected ITermCodeSystemVersionDao myCodeSystemVersionDao; + @Autowired + protected PlatformTransactionManager myTransactionMgr; + @Autowired + protected ITermConceptPropertyDao myConceptPropertyDao; + @Autowired + protected ITermConceptDesignationDao myConceptDesignationDao; + private boolean myProcessDeferred = true; + @Autowired private ITermConceptParentChildLinkDao myConceptParentChildLinkDao; @Autowired private ISchedulerService mySchedulerService; @@ -120,7 +131,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { @Transactional public void deleteCodeSystemForResource(ResourceTable 
theCodeSystemToDelete) { List codeSystemVersionsToDelete = myCodeSystemVersionDao.findByCodeSystemResourcePid(theCodeSystemToDelete.getResourceId()); - for (TermCodeSystemVersion codeSystemVersionToDelete : codeSystemVersionsToDelete){ + for (TermCodeSystemVersion codeSystemVersionToDelete : codeSystemVersionsToDelete) { if (codeSystemVersionToDelete != null) { myDeferredCodeSystemVersionsDeletions.add(codeSystemVersionToDelete); } @@ -131,13 +142,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { } } - @Override - public void saveAllDeferred() { - while (!isStorageQueueEmpty()) { - saveDeferred(); - } - } - @Override public void setProcessDeferred(boolean theProcessDeferred) { myProcessDeferred = theProcessDeferred; @@ -161,7 +165,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { ourLog.debug("Saving {} deferred concepts...", count); while (codeCount < count && myDeferredConcepts.size() > 0) { TermConcept next = myDeferredConcepts.remove(0); - if(myCodeSystemVersionDao.findById(next.getCodeSystemVersion().getPid()).isPresent()) { + if (myCodeSystemVersionDao.findById(next.getCodeSystemVersion().getPid()).isPresent()) { try { codeCount += myCodeSystemStorageSvc.saveConcept(next); } catch (Exception theE) { @@ -232,6 +236,25 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { myDeferredCodeSystemVersionsDeletions.clear(); } + private void runInTransaction(Runnable theRunnable) { + assert !TransactionSynchronizationManager.isActualTransactionActive(); + + new TransactionTemplate(myTransactionMgr).executeWithoutResult(tx -> theRunnable.run()); + } + + private T runInTransaction(Supplier theRunnable) { + assert !TransactionSynchronizationManager.isActualTransactionActive(); + + return new TransactionTemplate(myTransactionMgr).execute(tx -> theRunnable.get()); + } + + @Override + public void saveAllDeferred() { + while (!isStorageQueueEmpty()) { + saveDeferred(); + } + } + @Transactional(propagation = Propagation.NEVER) @Override public synchronized void saveDeferred() { @@ -249,10 +272,8 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { return; } - TransactionTemplate tt = new TransactionTemplate(myTransactionMgr); - tt.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW); if (isDeferredConceptsOrConceptLinksToSaveLater()) { - tt.execute(t -> { + runInTransaction(() -> { processDeferredConcepts(); return null; }); @@ -261,7 +282,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { } if (isDeferredValueSets()) { - tt.execute(t -> { + runInTransaction(() -> { processDeferredValueSets(); return null; }); @@ -270,7 +291,7 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { } if (isDeferredConceptMaps()) { - tt.execute(t -> { + runInTransaction(() -> { processDeferredConceptMaps(); return null; }); @@ -278,25 +299,116 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { continue; } + if (isDeferredCodeSystemVersionDeletions()) { + processDeferredCodeSystemVersionDeletions(); + } + if (isDeferredCodeSystemDeletions()) { processDeferredCodeSystemDeletions(); } } } + private boolean isDeferredCodeSystemVersionDeletions() { + return !myDeferredCodeSystemVersionsDeletions.isEmpty(); + } + private void processDeferredCodeSystemDeletions() { - - for (TermCodeSystemVersion next : myDeferredCodeSystemVersionsDeletions) { - myCodeSystemStorageSvc.deleteCodeSystemVersion(next); - } - - 
myDeferredCodeSystemVersionsDeletions.clear(); for (TermCodeSystem next : myDeferredCodeSystemsDeletions) { myCodeSystemStorageSvc.deleteCodeSystem(next); } myDeferredCodeSystemsDeletions.clear(); } + private void processDeferredCodeSystemVersionDeletions() { + for (TermCodeSystemVersion next : myDeferredCodeSystemVersionsDeletions) { + processDeferredCodeSystemVersionDeletions(next.getPid()); + } + + myDeferredCodeSystemVersionsDeletions.clear(); + } + + private void processDeferredCodeSystemVersionDeletions(long theCodeSystemVersionPid) { + assert !TransactionSynchronizationManager.isActualTransactionActive(); + ourLog.info(" * Deleting CodeSystemVersion[id={}]", theCodeSystemVersionPid); + + PageRequest page1000 = PageRequest.of(0, 1000); + + // Parent/Child links + { + String descriptor = "parent/child links"; + Supplier> loader = () -> myConceptParentChildLinkDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid); + Supplier counter = () -> myConceptParentChildLinkDao.countByCodeSystemVersion(theCodeSystemVersionPid); + doDelete(descriptor, loader, counter, myConceptParentChildLinkDao); + } + + // Properties + { + String descriptor = "concept properties"; + Supplier> loader = () -> myConceptPropertyDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid); + Supplier counter = () -> myConceptPropertyDao.countByCodeSystemVersion(theCodeSystemVersionPid); + doDelete(descriptor, loader, counter, myConceptPropertyDao); + } + + // Designations + { + String descriptor = "concept designations"; + Supplier> loader = () -> myConceptDesignationDao.findIdsByCodeSystemVersion(page1000, theCodeSystemVersionPid); + Supplier counter = () -> myConceptDesignationDao.countByCodeSystemVersion(theCodeSystemVersionPid); + doDelete(descriptor, loader, counter, myConceptDesignationDao); + } + + // Concepts + { + String descriptor = "concepts"; + // For some reason, concepts are much slower to delete, so use a smaller batch size + PageRequest page100 = PageRequest.of(0, 100); + Supplier> loader = () -> myConceptDao.findIdsByCodeSystemVersion(page100, theCodeSystemVersionPid); + Supplier counter = () -> myConceptDao.countByCodeSystemVersion(theCodeSystemVersionPid); + doDelete(descriptor, loader, counter, myConceptDao); + } + + runInTransaction(() -> { + Optional codeSystemOpt = myCodeSystemDao.findWithCodeSystemVersionAsCurrentVersion(theCodeSystemVersionPid); + if (codeSystemOpt.isPresent()) { + TermCodeSystem codeSystem = codeSystemOpt.get(); + ourLog.info(" * Removing code system version {} as current version of code system {}", theCodeSystemVersionPid, codeSystem.getPid()); + codeSystem.setCurrentVersion(null); + myCodeSystemDao.save(codeSystem); + } + + ourLog.info(" * Deleting code system version"); + Optional csv = myCodeSystemVersionDao.findById(theCodeSystemVersionPid); + if (csv.isPresent()) { + myCodeSystemVersionDao.delete(csv.get()); + } + }); + + + } + + private void doDelete(String theDescriptor, Supplier> theLoader, Supplier theCounter, JpaRepository theDao) { + assert !TransactionSynchronizationManager.isActualTransactionActive(); + + int count; + ourLog.info(" * Deleting {}", theDescriptor); + int totalCount = runInTransaction(theCounter); + StopWatch sw = new StopWatch(); + count = 0; + while (true) { + Slice link = runInTransaction(theLoader); + if (!link.hasContent()) { + break; + } + + runInTransaction(() -> link.forEach(theDao::deleteById)); + + count += link.getNumberOfElements(); + ourLog.info(" * {} {} deleted ({}/{}) remaining - {}/sec - ETA: {}", count, 
theDescriptor, count, totalCount, sw.formatThroughput(count, TimeUnit.SECONDS), sw.getEstimatedTimeRemaining(count, totalCount)); + } + } + + @Override public boolean isStorageQueueEmpty() { boolean retVal = !isProcessDeferredPaused(); @@ -354,16 +466,6 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { mySchedulerService.scheduleLocalJob(5000, jobDefinition); } - public static class Job implements HapiJob { - @Autowired - private ITermDeferredStorageSvc myTerminologySvc; - - @Override - public void execute(JobExecutionContext theContext) { - myTerminologySvc.saveDeferred(); - } - } - @VisibleForTesting void setTransactionManagerForUnitTest(PlatformTransactionManager theTxManager) { myTransactionMgr = theTxManager; @@ -395,5 +497,20 @@ public class TermDeferredStorageSvcImpl implements ITermDeferredStorageSvc { ourLog.info("isDeferredCodeSystemDeletions: {}", isDeferredCodeSystemDeletions()); } + @Override + public synchronized void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion) { + myDeferredCodeSystemVersionsDeletions.add(theCodeSystemVersion); + } + + public static class Job implements HapiJob { + @Autowired + private ITermDeferredStorageSvc myTerminologySvc; + + @Override + public void execute(JobExecutionContext theContext) { + myTerminologySvc.saveDeferred(); + } + } + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java index a0899c28c4a..d368800ea81 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/ValueSetExpansionComponentWithConceptAccumulator.java @@ -66,7 +66,7 @@ public class ValueSetExpansionComponentWithConceptAccumulator extends ValueSet.V myContext = theContext; } - @Nonnull + @Nonnull @Override public Integer getCapacityRemaining() { return (myMaxCapacity - myAddedConcepts) + mySkipCountRemaining; diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java index ec48668af0d..91cdb11b7f6 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/term/api/ITermDeferredStorageSvc.java @@ -21,6 +21,7 @@ package ca.uhn.fhir.jpa.term.api; */ import ca.uhn.fhir.jpa.entity.TermCodeSystem; +import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink; import ca.uhn.fhir.jpa.model.entity.ResourceTable; @@ -57,10 +58,13 @@ public interface ITermDeferredStorageSvc { void deleteCodeSystemForResource(ResourceTable theCodeSystemResourceToDelete); + void deleteCodeSystemVersion(TermCodeSystemVersion theCodeSystemVersion); + /** * This is mostly here for unit tests - Saves any and all deferred concepts and links */ void saveAllDeferred(); void logQueueForUnitTest(); + } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java index 9ecca53d3e8..0dc0f9612bb 100644 --- 
a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CircularQueueCaptureQueriesListener.java @@ -157,7 +157,7 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe } /** - * Returns all INSERT queries executed on the current thread - Index 0 is oldest + * Returns all queries executed on the current thread - Index 0 is oldest */ public List getAllQueriesForCurrentThread() { return getQueriesForCurrentThreadStartingWith(""); @@ -252,14 +252,25 @@ public class CircularQueueCaptureQueriesListener extends BaseCaptureQueriesListe } /** - * Log all captured INSERT queries + * Log all captured queries */ public void logAllQueriesForCurrentThread() { List queries = getAllQueriesForCurrentThread() .stream() .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) .collect(Collectors.toList()); - ourLog.info("Insert Queries:\n{}", String.join("\n", queries)); + ourLog.info("Queries:\n{}", String.join("\n", queries)); + } + + /** + * Log all captured queries + */ + public void logAllQueries() { + List queries = getCapturedQueries() + .stream() + .map(CircularQueueCaptureQueriesListener::formatQueryAsSql) + .collect(Collectors.toList()); + ourLog.info("Queries:\n{}", String.join("\n", queries)); } /** diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java index e503f6fc3f0..193d6ed246d 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/CoordCalculator.java @@ -21,17 +21,24 @@ package ca.uhn.fhir.jpa.util; */ -import org.hibernate.search.spatial.impl.Point; +import org.hibernate.search.engine.spatial.GeoPoint; +import org.hibernate.search.engine.spatial.GeoBoundingBox; +import org.slf4j.Logger; + +import static ca.uhn.fhir.jpa.searchparam.extractor.GeopointNormalizer.normalizeLatitude; +import static ca.uhn.fhir.jpa.searchparam.extractor.GeopointNormalizer.normalizeLongitude; +import static org.slf4j.LoggerFactory.getLogger; public class CoordCalculator { + private static final Logger ourLog = getLogger(CoordCalculator.class); public static final double MAX_SUPPORTED_DISTANCE_KM = 10000.0; // Slightly less than a quarter of the earth's circumference private static final double RADIUS_EARTH_KM = 6378.1; // Source: https://stackoverflow.com/questions/7222382/get-lat-long-given-current-point-distance-and-bearing - static Point findTarget(double theLatitudeDegrees, double theLongitudeDegrees, double theBearingDegrees, double theDistanceKm) { + static GeoPoint findTarget(double theLatitudeDegrees, double theLongitudeDegrees, double theBearingDegrees, double theDistanceKm) { - double latitudeRadians = Math.toRadians(Point.normalizeLatitude(theLatitudeDegrees)); - double longitudeRadians = Math.toRadians(Point.normalizeLongitude(theLongitudeDegrees)); + double latitudeRadians = Math.toRadians(normalizeLatitude(theLatitudeDegrees)); + double longitudeRadians = Math.toRadians(normalizeLongitude(theLongitudeDegrees)); double bearingRadians = Math.toRadians(theBearingDegrees); double distanceRadians = theDistanceKm / RADIUS_EARTH_KM; @@ -41,18 +48,23 @@ public class CoordCalculator { double targetLongitude = longitudeRadians + Math.atan2(Math.sin(bearingRadians) * Math.sin(distanceRadians) * Math.cos(latitudeRadians), 
Math.cos(distanceRadians)-Math.sin(latitudeRadians) * Math.sin(targetLatitude)); - return Point.fromDegrees(Math.toDegrees(targetLatitude), Math.toDegrees(targetLongitude)); + double latitude = Math.toDegrees(targetLatitude); + double longitude = Math.toDegrees(targetLongitude); + + GeoPoint of = GeoPoint.of(normalizeLatitude(latitude), normalizeLongitude(longitude)); + return of; } /** * Find a box around my coordinates such that the closest distance to each edge is the provided distance + * @return */ - public static SearchBox getBox(double theLatitudeDegrees, double theLongitudeDegrees, Double theDistanceKm) { + public static GeoBoundingBox getBox(double theLatitudeDegrees, double theLongitudeDegrees, Double theDistanceKm) { double diagonalDistanceKm = theDistanceKm * Math.sqrt(2.0); - Point northEast = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 45.0, diagonalDistanceKm); - Point southWest = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 225.0, diagonalDistanceKm); + GeoPoint topLeft = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 315.0, diagonalDistanceKm); + GeoPoint bottomRight = CoordCalculator.findTarget(theLatitudeDegrees, theLongitudeDegrees, 135.0, diagonalDistanceKm); - return new SearchBox(southWest, northEast); + return GeoBoundingBox.of(topLeft, bottomRight); } } diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchBox.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchBox.java deleted file mode 100644 index 57cecfc30d3..00000000000 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/SearchBox.java +++ /dev/null @@ -1,45 +0,0 @@ -package ca.uhn.fhir.jpa.util; - -/*- - * #%L - * HAPI FHIR JPA Server - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- * #L% - */ - -import org.hibernate.search.spatial.impl.Point; - -public class SearchBox { - private final Point mySouthWest; - private final Point myNorthEast; - - public SearchBox(Point theSouthWest, Point theNorthEast) { - mySouthWest = theSouthWest; - myNorthEast = theNorthEast; - } - - public Point getSouthWest() { - return mySouthWest; - } - - public Point getNorthEast() { - return myNorthEast; - } - - public boolean crossesAntiMeridian() { - return myNorthEast.getLongitude() < mySouthWest.getLongitude(); - } -} diff --git a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java index 5a7bfb4308f..9f00e466696 100644 --- a/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java +++ b/hapi-fhir-jpaserver-base/src/main/java/ca/uhn/fhir/jpa/util/TestUtil.java @@ -182,7 +182,9 @@ public class TestUtil { OneToOne oneToOne = nextField.getAnnotation(OneToOne.class); boolean isOtherSideOfOneToManyMapping = oneToMany != null && isNotBlank(oneToMany.mappedBy()); boolean isOtherSideOfOneToOneMapping = oneToOne != null && isNotBlank(oneToOne.mappedBy()); - boolean isField = nextField.getAnnotation(org.hibernate.search.annotations.Field.class) != null; + boolean isField = nextField.getAnnotation(org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField.class) != null; + isField |= nextField.getAnnotation(org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField.class) != null; + isField |= nextField.getAnnotation(org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField.class) != null; Validate.isTrue( hasEmbedded || hasColumn || diff --git a/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationCodeIndexSchema.json b/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationCodeIndexSchema.json index acedddf3490..3ac329472d7 100644 --- a/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationCodeIndexSchema.json +++ b/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationCodeIndexSchema.json @@ -1,26 +1,24 @@ { - "mappings" : { - "ca.uhn.fhir.jpa.model.entity.ObservationIndexedCodeCodeableConceptEntity" : { - "properties" : { - "codeable_concept_id" : { - "type" : "keyword" - }, - "codingcode" : { - "type" : "keyword" - }, - "codingcode_system_hash" : { - "type" : "keyword" - }, - "codingdisplay" : { - "type" : "keyword" - }, - "codingsystem" : { - "type" : "keyword" - }, - "text" : { - "type" : "keyword" - } - } - } - } + "mappings": { + "properties": { + "codeable_concept_id": { + "type": "keyword" + }, + "codingcode": { + "type": "keyword" + }, + "codingcode_system_hash": { + "type": "keyword" + }, + "codingdisplay": { + "type": "keyword" + }, + "codingsystem": { + "type": "keyword" + }, + "text": { + "type": "keyword" + } + } + } } diff --git a/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationIndexSchema.json b/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationIndexSchema.json index 5f76e5de590..2bf03d74c14 100644 --- a/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationIndexSchema.json +++ b/hapi-fhir-jpaserver-base/src/main/resources/ca/uhn/fhir/jpa/search/lastn/ObservationIndexSchema.json @@ -1,6 +1,5 @@ { "mappings" : { - "ca.uhn.fhir.jpa.model.entity.ObservationIndexedSearchParamLastNEntity" : { 
"properties" : { "codeconceptid" : { "type" : "keyword" @@ -47,4 +46,3 @@ } } } -} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu2Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu2Config.java index b2703eedb01..5eac45f795a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu2Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu2Config.java @@ -1,16 +1,19 @@ package ca.uhn.fhir.jpa.config; -import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; import ca.uhn.fhir.validation.IInstanceValidatorModule; import ca.uhn.fhir.validation.ResultSeverityEnum; import net.ttddyy.dsproxy.listener.ThreadQueryCountHolder; -import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.hibernate.dialect.H2Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Bean; @@ -23,9 +26,11 @@ import org.springframework.transaction.annotation.EnableTransactionManagement; import javax.sql.DataSource; import java.sql.Connection; import java.sql.SQLException; +import java.util.Map; import java.util.Properties; import java.util.concurrent.TimeUnit; +import static ca.uhn.fhir.jpa.dao.BaseJpaTest.buildHeapLuceneHibernateSearchProperties; import static org.junit.jupiter.api.Assertions.fail; @Configuration @@ -148,12 +153,13 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 { extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); - extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "local-heap"); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); + + extraProperties.putAll(buildHeapLuceneHibernateSearchProperties()); + return extraProperties; } + /** * Bean which validates incoming requests */ diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu3Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu3Config.java index 5c46caa0981..8cf0747a984 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu3Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestDstu3Config.java @@ -1,6 +1,6 @@ package ca.uhn.fhir.jpa.config; -import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.subscription.match.deliver.email.IEmailSender; import ca.uhn.fhir.jpa.subscription.match.deliver.email.JavaMailEmailSender; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; @@ -10,6 +10,10 @@ import ca.uhn.fhir.validation.ResultSeverityEnum; 
import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.hibernate.dialect.H2Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -151,10 +155,13 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 { extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); - extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "local-heap"); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); - extraProperties.put("hibernate.search.autoregister_listeners", "true"); + + extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); + extraProperties.put(HibernateOrmMapperSettings.ENABLED, "true"); + return extraProperties; } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java index 060069e9f09..8849f1acf06 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4Config.java @@ -6,6 +6,7 @@ import ca.uhn.fhir.jpa.batch.svc.BatchJobSubmitterImpl; import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc; import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl; import ca.uhn.fhir.jpa.bulk.svc.BulkExportDaoSvc; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; @@ -15,6 +16,10 @@ import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.hibernate.dialect.H2Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Import; @@ -27,6 +32,7 @@ import java.sql.Connection; import java.util.Properties; import java.util.concurrent.TimeUnit; +import static ca.uhn.fhir.jpa.dao.BaseJpaTest.buildHeapLuceneHibernateSearchProperties; import static org.junit.jupiter.api.Assertions.fail; @Configuration @@ -160,11 +166,8 @@ public class 
TestR4Config extends BaseJavaConfigR4 { extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); - extraProperties.put("hibernate.search.model_mapping", ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "local-heap"); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); - extraProperties.put("hibernate.search.autoregister_listeners", "true"); - extraProperties.put("hibernate.temp.use_jdbc_metadata_defaults","false"); + + extraProperties.putAll(buildHeapLuceneHibernateSearchProperties()); return extraProperties; } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticSearch.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticSearch.java index ebf3fc9e094..81a5bc6a293 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticSearch.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticSearch.java @@ -1,77 +1,60 @@ package ca.uhn.fhir.jpa.config; -import ca.uhn.fhir.context.ConfigurationException; import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder; -import org.hibernate.search.elasticsearch.cfg.ElasticsearchIndexStatus; -import org.hibernate.search.elasticsearch.cfg.IndexSchemaManagementStrategy; +import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper; +import org.hibernate.search.backend.elasticsearch.index.IndexStatus; +import org.hibernate.search.mapper.orm.schema.management.SchemaManagementStrategyName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; -import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic; -import pl.allegro.tech.embeddedelasticsearch.PopularProperties; +import org.testcontainers.elasticsearch.ElasticsearchContainer; import javax.annotation.PreDestroy; -import java.io.IOException; import java.util.Properties; -import java.util.UUID; -import java.util.concurrent.TimeUnit; + @Configuration public class TestR4ConfigWithElasticSearch extends TestR4Config { private static final Logger ourLog = LoggerFactory.getLogger(TestR4ConfigWithElasticSearch.class); - private static final String ELASTIC_VERSION = "6.5.4"; - protected final String elasticsearchHost = "localhost"; - protected final String elasticsearchUserId = ""; - protected final String elasticsearchPassword = ""; - @Override @Bean public Properties jpaProperties() { Properties retVal = super.jpaProperties(); + //Override default lucene settings // Force elasticsearch to start first - int httpPort = embeddedElasticSearch().getHttpPort(); - ourLog.info("ElasticSearch started on port: {}", httpPort); + int httpPort = elasticContainer().getMappedPort(9200);//9200 is the HTTP port + String host = elasticContainer().getHost(); new ElasticsearchHibernatePropertiesBuilder() - .setDebugRefreshAfterWrite(true) + .setDebugIndexSyncStrategy("read-sync") .setDebugPrettyPrintJsonLog(true) - .setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy.CREATE) + .setIndexSchemaManagementStrategy(SchemaManagementStrategyName.CREATE) .setIndexManagementWaitTimeoutMillis(10000) - .setRequiredIndexStatus(ElasticsearchIndexStatus.YELLOW) - .setRestUrl("http://"+ 
elasticsearchHost + ":" + httpPort) - .setUsername(elasticsearchUserId) - .setPassword(elasticsearchPassword) + .setRequiredIndexStatus(IndexStatus.YELLOW) + .setRestUrl(host+ ":" + httpPort) + .setProtocol("http") + .setUsername("") + .setPassword("") .apply(retVal); return retVal; } @Bean - public EmbeddedElastic embeddedElasticSearch() { - EmbeddedElastic embeddedElastic = null; - try { - embeddedElastic = EmbeddedElastic.builder() - .withElasticVersion(ELASTIC_VERSION) - .withSetting(PopularProperties.TRANSPORT_TCP_PORT, 0) - .withSetting(PopularProperties.HTTP_PORT, 0) - .withSetting(PopularProperties.CLUSTER_NAME, UUID.randomUUID()) - .withStartTimeout(60, TimeUnit.SECONDS) - .build() - .start(); - } catch (IOException | InterruptedException e) { - throw new ConfigurationException(e); - } - - return embeddedElastic; + public ElasticsearchContainer elasticContainer() { + ElasticsearchContainer embeddedElasticSearch = TestElasticsearchContainerHelper.getEmbeddedElasticSearch(); + embeddedElasticSearch.start(); + return embeddedElasticSearch; } + @PreDestroy public void stop() { - embeddedElasticSearch().stop(); + elasticContainer().stop(); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticsearchClient.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticsearchClient.java index b2e711b0c65..7e52caaf37f 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticsearchClient.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4ConfigWithElasticsearchClient.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.config; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -10,10 +11,17 @@ import java.io.IOException; @Configuration public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElasticSearch { + + @Bean + public PartitionSettings partitionSettings() { + return new PartitionSettings(); + } + @Bean() public ElasticsearchSvcImpl myElasticsearchSvc() { - int elasticsearchPort = embeddedElasticSearch().getHttpPort(); - return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword); + int elasticsearchPort = elasticContainer().getMappedPort(9200); + String host = elasticContainer().getHost(); + return new ElasticsearchSvcImpl(host, elasticsearchPort, "", ""); } @PreDestroy diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4WithLuceneDisabledConfig.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4WithLuceneDisabledConfig.java index 7ec227c1896..f0e4fc6f0b3 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4WithLuceneDisabledConfig.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR4WithLuceneDisabledConfig.java @@ -3,14 +3,14 @@ package ca.uhn.fhir.jpa.config; import java.util.Properties; import org.hibernate.dialect.H2Dialect; -import org.hibernate.jpa.HibernatePersistenceProvider; -import org.springframework.beans.factory.annotation.Autowire; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.springframework.context.annotation.Bean; 
import org.springframework.context.annotation.Configuration; import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean; import org.springframework.transaction.annotation.EnableTransactionManagement; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl; import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc; @Configuration @@ -41,7 +41,7 @@ public class TestR4WithLuceneDisabledConfig extends TestR4Config { extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); - extraProperties.put("hibernate.search.autoregister_listeners", "false"); + extraProperties.put(HibernateOrmMapperSettings.ENABLED, "false"); return extraProperties; } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR5Config.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR5Config.java index eed2b68e933..254c0c8a357 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR5Config.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/config/TestR5Config.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.config; import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc; import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; @@ -10,6 +11,10 @@ import net.ttddyy.dsproxy.listener.SingleQueryCountHolder; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.hibernate.dialect.H2Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @@ -23,6 +28,7 @@ import javax.sql.DataSource; import java.sql.Connection; import java.util.Properties; +import static ca.uhn.fhir.jpa.dao.BaseJpaTest.buildHeapLuceneHibernateSearchProperties; import static org.junit.jupiter.api.Assertions.fail; @Configuration @@ -146,10 +152,8 @@ public class TestR5Config extends BaseJavaConfigR5 { extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); - extraProperties.put("hibernate.search.model_mapping", ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "local-heap"); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); - extraProperties.put("hibernate.search.autoregister_listeners", "true"); + + extraProperties.putAll(buildHeapLuceneHibernateSearchProperties()); return extraProperties; } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java index fa95c97a45c..8d6007c492e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/BaseJpaTest.java @@ -13,10 +13,14 @@ import ca.uhn.fhir.jpa.bulk.api.IBulkDataExportSvc; import ca.uhn.fhir.jpa.config.BaseConfig; import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.entity.TermConcept; +import ca.uhn.fhir.jpa.entity.TermValueSet; +import ca.uhn.fhir.jpa.entity.TermValueSetConcept; +import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.jpa.model.util.JpaConstants; import ca.uhn.fhir.jpa.partition.IPartitionLookupSvc; import ca.uhn.fhir.jpa.provider.SystemProviderDstu2Test; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.search.PersistedJpaBundleProvider; import ca.uhn.fhir.jpa.search.cache.ISearchCacheSvc; import ca.uhn.fhir.jpa.search.cache.ISearchResultCacheSvc; @@ -24,6 +28,7 @@ import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc; import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamRegistry; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionLoader; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; +import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.jpa.util.MemoryCacheService; import ca.uhn.fhir.model.dstu2.resource.Bundle; @@ -45,6 +50,11 @@ import org.apache.commons.io.IOUtils; import org.hibernate.HibernateException; import org.hibernate.Session; import org.hibernate.SessionFactory; +import org.hibernate.jdbc.Work; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; import org.hl7.fhir.dstu3.model.Bundle.BundleEntryComponent; import org.hl7.fhir.dstu3.model.Resource; @@ -61,6 +71,7 @@ import org.mockito.Mock; import org.mockito.MockitoAnnotations; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.data.domain.Pageable; import org.springframework.orm.jpa.JpaTransactionManager; import org.springframework.test.context.TestPropertySource; import org.springframework.transaction.PlatformTransactionManager; @@ -76,14 +87,18 @@ import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.List; +import java.util.Map; +import java.util.Optional; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.stream.Collectors; +import java.util.stream.Stream; import static ca.uhn.fhir.util.TestUtil.randomizeLocale; import static org.awaitility.Awaitility.await; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.ArgumentMatchers.eq; @@ -111,6 +126,16 @@ public abstract class BaseJpaTest extends BaseTest { TestUtil.setShouldRandomizeTimezones(false); } + public static Map buildHeapLuceneHibernateSearchProperties() { + Map props = new HashMap<>(); + props.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); + 
props.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + props.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap"); + props.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); + props.put(HibernateOrmMapperSettings.ENABLED, "true"); + return props; + } + @RegisterExtension public LoggingExtension myLoggingExtension = new LoggingExtension(); @Mock(answer = Answers.RETURNS_DEEP_STUBS) @@ -580,4 +605,63 @@ public abstract class BaseJpaTest extends BaseTest { Thread.sleep(500); } + protected TermValueSetConceptDesignation assertTermConceptContainsDesignation(TermValueSetConcept theConcept, String theLanguage, String theUseSystem, String theUseCode, String theUseDisplay, String theDesignationValue) { + Stream stream = theConcept.getDesignations().stream(); + if (theLanguage != null) { + stream = stream.filter(designation -> theLanguage.equalsIgnoreCase(designation.getLanguage())); + } + if (theUseSystem != null) { + stream = stream.filter(designation -> theUseSystem.equalsIgnoreCase(designation.getUseSystem())); + } + if (theUseCode != null) { + stream = stream.filter(designation -> theUseCode.equalsIgnoreCase(designation.getUseCode())); + } + if (theUseDisplay != null) { + stream = stream.filter(designation -> theUseDisplay.equalsIgnoreCase(designation.getUseDisplay())); + } + if (theDesignationValue != null) { + stream = stream.filter(designation -> theDesignationValue.equalsIgnoreCase(designation.getValue())); + } + + Optional first = stream.findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Concept %s did not contain designation [%s|%s|%s|%s|%s] ", theConcept.toString(), theLanguage, theUseSystem, theUseCode, theUseDisplay, theDesignationValue); + fail(failureMessage); + return null; + } else { + return first.get(); + } + + } + + protected TermValueSetConcept assertTermValueSetContainsConceptAndIsInDeclaredOrder(TermValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) { + List contains = theValueSet.getConcepts(); + + Stream stream = contains.stream(); + if (theSystem != null) { + stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem())); + } + if (theCode != null ) { + stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode())); + } + if (theDisplay != null){ + stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay())); + } + if (theDesignationCount != null) { + stream = stream.filter(concept -> concept.getDesignations().size() == theDesignationCount); + } + + Optional first = stream.findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount); + fail(failureMessage); + return null; + } else { + TermValueSetConcept termValueSetConcept = first.get(); + assertEquals(termValueSetConcept.getOrder(), theValueSet.getConcepts().indexOf(termValueSetConcept)); + return termValueSetConcept; + } + } + + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java index a3804114065..f530dbaaabe 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java +++ 
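The buildHeapLuceneHibernateSearchProperties() helper added to BaseJpaTest above is the Hibernate Search 6 replacement for the four hibernate.search.* keys deleted from TestR4Config and TestR5Config. A standalone restatement of the key mapping, with the map's type parameters written out (Map<String, String> is an assumption here) and the superseded Hibernate Search 5 keys noted in comments:

    import java.util.HashMap;
    import java.util.Map;

    import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
    import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
    import org.hibernate.search.engine.cfg.BackendSettings;
    import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings;

    import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;

    // Illustrative wrapper class, not part of the patch.
    public final class HeapLuceneSearchPropertiesSketch {

        public static Map<String, String> build() {
            Map<String, String> props = new HashMap<>();
            // was: hibernate.search.default.directory_provider = local-heap
            props.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
            props.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap");
            // analyzers now come from an analysis configurer rather than the old
            // hibernate.search.model_mapping factory
            props.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER),
                HapiLuceneAnalysisConfigurer.class.getName());
            // was: hibernate.search.lucene_version = LUCENE_CURRENT
            props.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
            // was: hibernate.search.autoregister_listeners; set to "false" (as in
            // TestR4WithLuceneDisabledConfig) to turn indexing off entirely
            props.put(HibernateOrmMapperSettings.ENABLED, "true");
            return props;
        }
    }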
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/BaseJpaDstu2Test.java @@ -62,8 +62,8 @@ import ca.uhn.fhir.rest.api.EncodingEnum; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; import ca.uhn.fhir.util.TestUtil; import org.apache.commons.io.IOUtils; -import org.hibernate.search.jpa.FullTextEntityManager; -import org.hibernate.search.jpa.Search; +import org.hibernate.search.mapper.orm.Search; +import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.instance.model.api.IBaseResource; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.AfterEach; @@ -226,10 +226,10 @@ public abstract class BaseJpaDstu2Test extends BaseJpaTest { @BeforeEach public void beforeFlushFT() { runInTransaction(() -> { - FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager); - ftem.purgeAll(ResourceTable.class); - ftem.purgeAll(ResourceIndexedSearchParamString.class); - ftem.flushToIndexes(); + SearchSession searchSession = Search.session(myEntityManager); + searchSession.workspace(ResourceTable.class).purge(); +// searchSession.workspace(ResourceIndexedSearchParamString.class).purge(); + searchSession.indexingPlan().execute(); }); myDaoConfig.setSchedulingDisabled(true); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchFtTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchFtTest.java index 1136fb072f3..eaea5763de5 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchFtTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirResourceDaoDstu2SearchFtTest.java @@ -16,7 +16,6 @@ import org.junit.jupiter.api.Test; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.model.dstu2.resource.*; import ca.uhn.fhir.model.primitive.Base64BinaryDt; @@ -33,114 +32,6 @@ public class FhirResourceDaoDstu2SearchFtTest extends BaseJpaDstu2Test { myDaoConfig.setReuseCachedSearchResultsForMillis(null); } - @Test - public void testSuggestIgnoresBase64Content() { - Patient patient = new Patient(); - patient.addName().addFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); - - Media med = new Media(); - med.getSubject().setReference(ptId); - med.getSubtype().setText("Systolic Blood Pressure"); - med.getContent().setContentType("LCws"); - med.getContent().setData(new Base64BinaryDt(new byte[] { 44, 44, 44, 44, 44, 44, 44, 44 })); - med.getContent().setTitle("bbbb syst"); - myMediaDao.create(med, mySrd); - ourLog.info(myFhirCtx.newJsonParser().encodeResourceToString(med)); - - List output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "press", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("Pressure", output.get(0).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "prezure", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("Pressure", output.get(0).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(1).getTerm()); - - 
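The beforeFlushFT() hunk in BaseJpaDstu2Test above shows the index-reset change repeated in every test base class in this patch: FullTextEntityManager.purgeAll()/flushToIndexes() become a SearchSession workspace purge followed by an explicit indexing-plan execution. The same pattern in isolation, assuming an injected JPA EntityManager and an active transaction (the tests provide both via runInTransaction):

    import javax.persistence.EntityManager;

    import org.hibernate.search.mapper.orm.Search;
    import org.hibernate.search.mapper.orm.session.SearchSession;

    import ca.uhn.fhir.jpa.model.entity.ResourceTable;

    // Illustrative wrapper class, not part of the patch.
    public class FullTextPurgeSketch {

        public static void purgeResourceIndex(EntityManager theEntityManager) {
            SearchSession searchSession = Search.session(theEntityManager);
            // Drop every indexed ResourceTable document from the full-text index.
            searchSession.workspace(ResourceTable.class).purge();
            // Flush any indexing work queued on the current session's indexing plan.
            searchSession.indexingPlan().execute();
        }
    }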
output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "syst", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("syst", output.get(0).getTerm()); - assertEquals("bbbb syst", output.get(1).getTerm()); - assertEquals("Systolic", output.get(2).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "LCws", null); - ourLog.info("Found: " + output); - assertEquals(0, output.size()); - } - - @Test - public void testSuggest() { - Patient patient = new Patient(); - patient.addName().addFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); - - Observation obs = new Observation(); - obs.getSubject().setReference(ptId); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - myObservationDao.create(obs, mySrd); - - obs = new Observation(); - obs.getSubject().setReference(ptId); - obs.getCode().setText("MNBVCXZ"); - myObservationDao.create(obs, mySrd); - - obs = new Observation(); - obs.getSubject().setReference(ptId); - obs.getCode().setText("ZXC HELLO"); - obs.addComponent().getCode().setText("HHHHHHHHHH"); - myObservationDao.create(obs, mySrd); - - /* - * These shouldn't match since they're for another patient - */ - patient = new Patient(); - patient.addName().addFamily("testSuggest2"); - IIdType ptId2 = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); - - Observation obs2 = new Observation(); - obs2.getSubject().setReference(ptId2); - obs2.getCode().setText("ZXCVBNMZZ"); - myObservationDao.create(obs2, mySrd); - - List output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("ZXCVBNM", output.get(0).getTerm()); - assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm()); - assertEquals("ZXC", output.get(2).getTerm()); - assertEquals("ZXC HELLO", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("ZXC", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - assertEquals("ZXCVBNM", output.get(2).getTerm()); - assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("HELLO", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z", null); - ourLog.info("Found: " + output); - assertEquals(0, output.size()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("ZXC", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - - } - @Test public void testSearchAndReindex() { SearchParameterMap map; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java index 4c7c3706544..9cd366f3b1e 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu2/FhirSystemDaoDstu2Test.java @@ -1165,7 +1165,6 @@ public class FhirSystemDaoDstu2Test extends BaseJpaDstu2SystemTest { assertEquals("204 No Content", resp.getEntry().get(3).getResponse().getStatus()); } - Bundle respGetBundle = (Bundle) resp.getEntry().get(0).getResource(); assertEquals(1, respGetBundle.getEntry().size()); assertEquals("testTransactionOrdering" + pass, ((Patient) respGetBundle.getEntry().get(0).getResource()).getNameFirstRep().getFamilyFirstRep().getValue()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java index 7c8a52d6a48..a33280e97ce 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/BaseJpaDstu3Test.java @@ -25,6 +25,7 @@ import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; import ca.uhn.fhir.jpa.dao.data.IResourceTagDao; import ca.uhn.fhir.jpa.dao.data.ITagDefinitionDao; import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemDao; +import ca.uhn.fhir.jpa.dao.data.ITermCodeSystemVersionDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptMapDao; import ca.uhn.fhir.jpa.dao.data.ITermConceptMapGroupElementTargetDao; @@ -54,8 +55,8 @@ import ca.uhn.fhir.rest.server.exceptions.InternalErrorException; import ca.uhn.fhir.rest.server.provider.ResourceProviderFactory; import ca.uhn.fhir.util.UrlUtil; import org.apache.commons.io.IOUtils; -import org.hibernate.search.jpa.FullTextEntityManager; -import org.hibernate.search.jpa.Search; +import org.hibernate.search.mapper.orm.Search; +import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.dstu3.model.AllergyIntolerance; import org.hl7.fhir.dstu3.model.Appointment; import org.hl7.fhir.dstu3.model.AuditEvent; @@ -319,6 +320,8 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest { @Autowired protected ITermCodeSystemDao myTermCodeSystemDao; @Autowired + protected ITermCodeSystemVersionDao myTermCodeSystemVersionDao; + @Autowired protected ITermReadSvc myTermSvc; @Autowired protected PlatformTransactionManager myTransactionMgr; @@ -375,10 +378,10 @@ public abstract class BaseJpaDstu3Test extends BaseJpaTest { @BeforeEach public void beforeFlushFT() { runInTransaction(() -> { - FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager); - ftem.purgeAll(ResourceTable.class); - ftem.purgeAll(ResourceIndexedSearchParamString.class); - ftem.flushToIndexes(); + SearchSession searchSession = Search.session(myEntityManager); + searchSession.workspace(ResourceTable.class).purge(); +// searchSession.workspace(ResourceIndexedSearchParamString.class).purge(); + searchSession.indexingPlan().execute(); }); myDaoConfig.setSchedulingDisabled(true); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java index edbb9f682a9..2e82fe6b11a 100644 --- 
a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3CodeSystemTest.java @@ -63,10 +63,24 @@ public class FhirResourceDaoDstu3CodeSystemTest extends BaseJpaDstu3Test { cs.addConcept().setCode("A"); cs.addConcept().setCode("B"); myCodeSystemDao.update(cs, mySrd); + myTerminologyDeferredStorageSvc.saveAllDeferred(); runInTransaction(()->{ assertEquals(2, myConceptDao.count()); }); + // Update the code system to reduce the count again + cs = new CodeSystem(); + cs.setId(id); + cs.setUrl("http://foo"); + cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE); + cs.setStatus(Enumerations.PublicationStatus.ACTIVE); + cs.addConcept().setCode("C"); + myCodeSystemDao.update(cs, mySrd); + myTerminologyDeferredStorageSvc.saveAllDeferred(); + runInTransaction(()->{ + assertEquals(1, myConceptDao.count()); + }); + // Delete the code system runInTransaction(()->{ myCodeSystemDao.delete(id); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchFtTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchFtTest.java index a0bdbcb7e00..f46c2612526 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchFtTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/dstu3/FhirResourceDaoDstu3SearchFtTest.java @@ -1,7 +1,6 @@ package ca.uhn.fhir.jpa.dao.dstu3; import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.param.StringAndListParam; @@ -141,116 +140,6 @@ public class FhirResourceDaoDstu3SearchFtTest extends BaseJpaDstu3Test { } - - @Test - public void testSuggestIgnoresBase64Content() { - Patient patient = new Patient(); - patient.addName().setFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless(); - - Media med = new Media(); - med.getSubject().setReferenceElement(ptId); - med.getSubtype().setText("Systolic Blood Pressure"); - med.getContent().setContentType("LCws"); - med.getContent().setDataElement(new Base64BinaryType(new byte[] {44,44,44,44,44,44,44,44})); - med.getContent().setTitle("bbbb syst"); - myMediaDao.create(med, mockSrd()); - ourLog.info(myFhirCtx.newJsonParser().encodeResourceToString(med)); - - List output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "press", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("Pressure", output.get(0).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "prezure", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("Pressure", output.get(0).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "syst", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("syst", output.get(0).getTerm()); - assertEquals("bbbb syst", output.get(1).getTerm()); - assertEquals("Systolic", output.get(2).getTerm()); - 
assertEquals("Systolic Blood Pressure", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "LCws", null); - ourLog.info("Found: " + output); - assertEquals(0, output.size()); - } - - @Test - public void testSuggest() { - Patient patient = new Patient(); - patient.addName().setFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless(); - - Observation obs = new Observation(); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - myObservationDao.create(obs, mockSrd()); - - obs = new Observation(); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("MNBVCXZ"); - myObservationDao.create(obs, mockSrd()); - - obs = new Observation(); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("ZXC HELLO"); - obs.addComponent().getCode().setText("HHHHHHHHHH"); - myObservationDao.create(obs, mockSrd()); - - /* - * These shouldn't match since they're for another patient - */ - patient = new Patient(); - patient.addName().setFamily("testSuggest2"); - IIdType ptId2 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless(); - - Observation obs2 = new Observation(); - obs2.getSubject().setReferenceElement(ptId2); - obs2.getCode().setText("ZXCVBNMZZ"); - myObservationDao.create(obs2, mockSrd()); - - List output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("ZXCVBNM", output.get(0).getTerm()); - assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm()); - assertEquals("ZXC", output.get(2).getTerm()); - assertEquals("ZXC HELLO", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("ZXC", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - assertEquals("ZXCVBNM", output.get(2).getTerm()); - assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("HELLO", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z", null); - ourLog.info("Found: " + output); - assertEquals(0, output.size()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("ZXC", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - - } - - @Test public void testSearchAndReindex() { Patient patient; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoordsTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoordsTest.java index f8179e050c9..57ae294152d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoordsTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/predicate/PredicateBuilderCoordsTest.java @@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.dao.predicate; import ca.uhn.fhir.jpa.dao.LegacySearchBuilder; import ca.uhn.fhir.jpa.util.CoordCalculator; import ca.uhn.fhir.jpa.util.CoordCalculatorTest; -import ca.uhn.fhir.jpa.util.SearchBox; +import org.hibernate.search.engine.spatial.GeoBoundingBox; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; @@ -41,8 +41,8 @@ public class PredicateBuilderCoordsTest { @Test public void testLongitudePredicateFromBox() { - SearchBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_CHIN, CoordCalculatorTest.LONGITUDE_CHIN, CoordCalculatorTest.DISTANCE_TAVEUNI); - assertThat(box.getNorthEast().getLongitude(), greaterThan(box.getSouthWest().getLongitude())); + GeoBoundingBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_CHIN, CoordCalculatorTest.LONGITUDE_CHIN, CoordCalculatorTest.DISTANCE_TAVEUNI); + assertThat(box.bottomRight().longitude(), greaterThan(box.topLeft().longitude())); ArgumentCaptor andLeft = ArgumentCaptor.forClass(Predicate.class); ArgumentCaptor andRight = ArgumentCaptor.forClass(Predicate.class); @@ -58,15 +58,15 @@ public class PredicateBuilderCoordsTest { verify(myBuilder).and(andLeft.capture(), andRight.capture()); assertEquals(andLeft.getValue(), gte); assertEquals(andRight.getValue(), lte); - assertEquals(gteValue.getValue(), box.getSouthWest().getLongitude()); - assertEquals(lteValue.getValue(), box.getNorthEast().getLongitude()); + assertEquals(gteValue.getValue(), box.topLeft().longitude()); + assertEquals(lteValue.getValue(), box.bottomRight().longitude()); } @Test public void testAntiMeridianLongitudePredicateFromBox() { - SearchBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_TAVEUNI, CoordCalculatorTest.LONGITIDE_TAVEUNI, CoordCalculatorTest.DISTANCE_TAVEUNI); - assertThat(box.getNorthEast().getLongitude(), lessThan(box.getSouthWest().getLongitude())); - assertTrue(box.crossesAntiMeridian()); + GeoBoundingBox box = CoordCalculator.getBox(CoordCalculatorTest.LATITUDE_TAVEUNI, CoordCalculatorTest.LONGITIDE_TAVEUNI, CoordCalculatorTest.DISTANCE_TAVEUNI); + assertThat(box.bottomRight().longitude(), lessThan(box.topLeft().longitude())); + assertTrue(box.bottomRight().longitude() < box.topLeft().longitude()); ArgumentCaptor orLeft = ArgumentCaptor.forClass(Predicate.class); ArgumentCaptor orRight = ArgumentCaptor.forClass(Predicate.class); @@ -82,8 +82,8 @@ public class PredicateBuilderCoordsTest { verify(myBuilder).or(orLeft.capture(), orRight.capture()); assertEquals(orLeft.getValue(), gte); assertEquals(orRight.getValue(), lte); - assertEquals(gteValue.getValue(), box.getNorthEast().getLongitude()); - assertEquals(lteValue.getValue(), box.getSouthWest().getLongitude()); + assertEquals(gteValue.getValue(), box.bottomRight().longitude()); + assertEquals(lteValue.getValue(), box.topLeft().longitude()); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java index 52535fafb07..608d9aca363 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/BaseJpaR4Test.java @@ -54,6 +54,9 @@ import ca.uhn.fhir.jpa.dao.index.IdHelperService; import ca.uhn.fhir.jpa.entity.TermCodeSystem; import 
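The PredicateBuilderCoordsTest changes above reflect the removal of the custom SearchBox type: CoordCalculator.getBox(...) now returns Hibernate Search's GeoBoundingBox, and the anti-meridian case is detected directly from its corner points. A short sketch of how the tests read the box; the parameter names and double types are assumptions, and only topLeft()/bottomRight()/longitude() are taken from the hunk:

    import org.hibernate.search.engine.spatial.GeoBoundingBox;

    import ca.uhn.fhir.jpa.util.CoordCalculator;

    // Illustrative wrapper class, not part of the patch; argument names and types are assumed.
    public class BoundingBoxSketch {

        public static boolean crossesAntiMeridian(double theLatitude, double theLongitude, double theDistanceKm) {
            GeoBoundingBox box = CoordCalculator.getBox(theLatitude, theLongitude, theDistanceKm);
            double west = box.topLeft().longitude();     // the old SearchBox getSouthWest() longitude
            double east = box.bottomRight().longitude(); // the old SearchBox getNorthEast() longitude
            // Equivalent of the removed SearchBox.crossesAntiMeridian(): the box wraps when east < west.
            return east < west;
        }
    }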
ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; +import ca.uhn.fhir.jpa.entity.TermValueSet; +import ca.uhn.fhir.jpa.entity.TermValueSetConcept; +import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; @@ -68,6 +71,7 @@ import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc; import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl; import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; +import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermLoaderSvc; @@ -87,8 +91,8 @@ import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ValidationResult; import org.apache.commons.io.IOUtils; -import org.hibernate.search.jpa.FullTextEntityManager; -import org.hibernate.search.jpa.Search; +import org.hibernate.search.mapper.orm.Search; +import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.common.hapi.validation.support.CachingValidationSupport; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; import org.hl7.fhir.instance.model.api.IBaseResource; @@ -159,6 +163,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationContext; +import org.springframework.data.domain.Pageable; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.test.util.AopTestUtils; @@ -170,11 +175,14 @@ import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Map; +import java.util.Optional; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; @@ -524,10 +532,10 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil @BeforeEach public void beforeFlushFT() { runInTransaction(() -> { - FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager); - ftem.purgeAll(ResourceTable.class); - ftem.purgeAll(ResourceIndexedSearchParamString.class); - ftem.flushToIndexes(); + SearchSession searchSession = Search.session(myEntityManager); + searchSession.workspace(ResourceTable.class).purge(); +// searchSession.workspace(ResourceIndexedSearchParamString.class).purge(); + searchSession.indexingPlan().execute(); }); myDaoConfig.setSchedulingDisabled(true); @@ -751,4 +759,68 @@ public abstract class BaseJpaR4Test extends BaseJpaTest implements ITestDataBuil return uuid; } + + protected ValueSet.ConceptReferenceDesignationComponent assertConceptContainsDesignation(ValueSet.ValueSetExpansionContainsComponent theConcept, String theLanguage, String theUseSystem, String theUseCode, String theUseDisplay, String theDesignationValue) { + Stream stream = 
theConcept.getDesignation().stream(); + if (theLanguage != null) { + stream = stream.filter(designation -> theLanguage.equalsIgnoreCase(designation.getLanguage())); + } + if (theUseSystem != null) { + stream = stream.filter(designation -> theUseSystem.equalsIgnoreCase(designation.getUse().getSystem())); + } + if (theUseCode != null) { + stream = stream.filter(designation -> theUseCode.equalsIgnoreCase(designation.getUse().getCode())); + } + if (theUseDisplay != null) { + stream = stream.filter(designation -> theUseDisplay.equalsIgnoreCase(designation.getUse().getDisplay())); + } + if (theDesignationValue != null) { + stream = stream.filter(designation -> theDesignationValue.equalsIgnoreCase(designation.getValue())); + } + + Optional first = stream.findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Concept %s did not contain designation [%s|%s|%s|%s|%s] ", theConcept.toString(), theLanguage, theUseSystem, theUseCode, theUseDisplay, theDesignationValue); + fail(failureMessage); + return null; + } else { + return first.get(); + } + } + + protected ValueSet.ValueSetExpansionContainsComponent assertExpandedValueSetContainsConcept(ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) { + List contains = theValueSet.getExpansion().getContains(); + + Stream stream = contains.stream(); + if (theSystem != null) { + stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem())); + } + if (theCode != null ) { + stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode())); + } + if (theDisplay != null){ + stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay())); + } + if (theDesignationCount != null) { + stream = stream.filter(concept -> concept.getDesignation().size() == theDesignationCount); + } + + Optional first = stream.findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount); + fail(failureMessage); + return null; + } else { + return first.get(); + } + } + public List getExpandedConceptsByValueSetUrl(String theValuesetUrl) { + return runInTransaction(() -> { + List valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl); + assertEquals(1, valueSets.size()); + TermValueSet valueSet = valueSets.get(0); + List concepts = valueSet.getConcepts(); + return concepts.stream().map(concept -> concept.getCode()).collect(Collectors.toList()); + }); + } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java index 724d721ac1f..9b271eaed33 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4CodeSystemTest.java @@ -59,7 +59,7 @@ public class FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test { // Now the background scheduler will do its thing myTerminologyDeferredStorageSvc.saveDeferred(); runInTransaction(() -> { - assertEquals(1, myTermCodeSystemDao.count()); + assertEquals(0, myTermCodeSystemDao.count()); assertEquals(0, myTermCodeSystemVersionDao.count()); assertEquals(0, myTermConceptDao.count()); }); @@ -153,7 +153,7 @@ public class 
FhirResourceDaoR4CodeSystemTest extends BaseJpaR4Test { // The remaining versions and Code System entities should be gone now. runInTransaction(() -> { - assertEquals(1, myTermCodeSystemDao.count()); + assertEquals(0, myTermCodeSystemDao.count()); assertNull(myTermCodeSystemDao.findByCodeSystemUri("http://foo")); assertEquals(0, myTermCodeSystemVersionDao.count()); List resourceList = myResourceTableDao.findAll(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java index 1cac6951212..28a84f6bf00 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4DeleteTest.java @@ -47,7 +47,6 @@ public class FhirResourceDaoR4DeleteTest extends BaseJpaR4Test { runInTransaction(() -> { ResourceTable resourceTable = myResourceTableDao.findById(id.getIdPartAsLong()).get(); assertNotNull(resourceTable.getDeleted()); - assertTrue(resourceTable.isDeleted()); }); // Current version should be marked as deleted diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchFtTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchFtTest.java index ddd35b87b1f..80ca3ad0769 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchFtTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchFtTest.java @@ -1,16 +1,13 @@ package ca.uhn.fhir.jpa.dao.r4; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; -import ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl.Suggestion; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.param.*; import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails; -import ca.uhn.fhir.util.TestUtil; import org.hl7.fhir.instance.model.api.IIdType; import org.hl7.fhir.r4.model.*; import org.hl7.fhir.r4.model.Observation.ObservationStatus; -import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; @@ -122,129 +119,6 @@ public class FhirResourceDaoR4SearchFtTest extends BaseJpaR4Test { } - @Test - public void testSuggestIgnoresBase64Content() { - Patient patient = new Patient(); - patient.addName().setFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless(); - - // Attached to patient - Observation obs1 = new Observation(); - obs1.setSubject(new Reference(ptId)); - obs1.getCode().setText("AAAAA"); - obs1.setValue(new StringType("Systolic Blood Pressure")); - obs1.setStatus(ObservationStatus.FINAL); - myObservationDao.create(obs1, mockSrd()).getId().toUnqualifiedVersionless(); - - // Not attached to patient - Observation obs2 = new Observation(); - obs2.getCode().setText("AAAAA"); - obs2.setValue(new StringType("Diastolic Blood Pressure")); - obs2.setStatus(ObservationStatus.FINAL); - myObservationDao.create(obs2, mockSrd()).getId().toUnqualifiedVersionless(); - - - Media med = new Media(); - med.getSubject().setReferenceElement(ptId); - med.getContent().setContentType("LCws"); - med.getContent().setDataElement(new Base64BinaryType(new byte[]{44, 44, 44, 44, 44, 44, 44, 44})); - med.getContent().setTitle("bbbb syst"); - myMediaDao.create(med, 
mockSrd()); - ourLog.info(myFhirCtx.newJsonParser().encodeResourceToString(med)); - - List output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "press", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("Pressure", output.get(0).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "prezure", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("Pressure", output.get(0).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "syst", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("syst", output.get(0).getTerm()); - assertEquals("bbbb syst", output.get(1).getTerm()); - assertEquals("Systolic", output.get(2).getTerm()); - assertEquals("Systolic Blood Pressure", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "LCws", null); - ourLog.info("Found: " + output); - assertEquals(0, output.size()); - } - - @Test - public void testSuggest() { - Patient patient = new Patient(); - patient.addName().setFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless(); - - Observation obs = new Observation(); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - myObservationDao.create(obs, mockSrd()); - - obs = new Observation(); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("MNBVCXZ"); - myObservationDao.create(obs, mockSrd()); - - obs = new Observation(); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("ZXC HELLO"); - obs.addComponent().getCode().setText("HHHHHHHHHH"); - myObservationDao.create(obs, mockSrd()); - - /* - * These shouldn't match since they're for another patient - */ - patient = new Patient(); - patient.addName().setFamily("testSuggest2"); - IIdType ptId2 = myPatientDao.create(patient, mockSrd()).getId().toUnqualifiedVersionless(); - - Observation obs2 = new Observation(); - obs2.getSubject().setReferenceElement(ptId2); - obs2.getCode().setText("ZXCVBNMZZ"); - myObservationDao.create(obs2, mockSrd()); - - List output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXCVBNM", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("ZXCVBNM", output.get(0).getTerm()); - assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(1).getTerm()); - assertEquals("ZXC", output.get(2).getTerm()); - assertEquals("ZXC HELLO", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZXC", null); - ourLog.info("Found: " + output); - assertEquals(4, output.size()); - assertEquals("ZXC", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - assertEquals("ZXCVBNM", output.get(2).getTerm()); - assertEquals("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL", output.get(3).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "HELO", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("HELLO", 
output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "Z", null); - ourLog.info("Found: " + output); - assertEquals(0, output.size()); - - output = mySearchDao.suggestKeywords("Patient/" + ptId.getIdPart() + "/$everything", "_content", "ZX", null); - ourLog.info("Found: " + output); - assertEquals(2, output.size()); - assertEquals("ZXC", output.get(0).getTerm()); - assertEquals("ZXC HELLO", output.get(1).getTerm()); - - } - @Test public void testSearchAndReindex() { Patient patient; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java index 923ae7c116e..19076e16b97 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4SearchLastNIT.java @@ -1,6 +1,5 @@ package ca.uhn.fhir.jpa.dao.r4; -import ca.uhn.fhir.jpa.dao.LegacySearchBuilder; import ca.uhn.fhir.jpa.search.builder.SearchBuilder; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; import ca.uhn.fhir.rest.param.ReferenceParam; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java index 82cc8a2d059..ea6d4e1a2dc 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r4/FhirResourceDaoR4Test.java @@ -6,7 +6,7 @@ import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirDao; import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao; import ca.uhn.fhir.jpa.dao.JpaResourceDao; -import ca.uhn.fhir.jpa.entity.Search; +import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.model.entity.ResourceEncodingEnum; import ca.uhn.fhir.jpa.model.entity.ResourceHistoryTable; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; @@ -50,6 +50,8 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; import org.hamcrest.Matchers; import org.hamcrest.core.StringContains; +import org.hibernate.search.mapper.orm.Search; +import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.instance.model.api.IAnyResource; import org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.instance.model.api.IIdType; @@ -125,6 +127,7 @@ import java.util.concurrent.Future; import static org.apache.commons.lang3.StringUtils.countMatches; import static org.apache.commons.lang3.StringUtils.defaultString; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.containsString; @@ -286,8 +289,42 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { assertEquals(BaseHapiFhirDao.INDEX_STATUS_INDEXED, tableOpt.get().getIndexStatus().longValue()); assertThat(myResourceIndexedSearchParamTokenDao.countForResourceId(id1.getIdPartAsLong()), not(greaterThan(0))); }); + } + + @Test + public void testTermConceptReindexingDoesntDuplicateData() { + myDaoConfig.setSchedulingDisabled(true); + CodeSystem cs = new CodeSystem(); + 
cs.setId("nhin-use"); + cs.setUrl("http://zoop.com"); + cs.setContent(CodeSystem.CodeSystemContentMode.COMPLETE); + cs.addConcept().setCode("zoop1").setDisplay("zoop_disp1").setDefinition("zoop_defi1"); + cs.addConcept().setCode("zoop2").setDisplay("zoop_disp2").setDefinition("zoop_defi2"); + cs.addConcept().setCode("zoop3").setDisplay("zoop_disp3").setDefinition("zoop_defi3"); + + IIdType id1 = myCodeSystemDao.create(cs).getId().toUnqualifiedVersionless(); + + runInTransaction(() -> { + assertEquals(3L, myTermConceptDao.count()); + + SearchSession session = Search.session(myEntityManager); + List termConcepts = session.search(TermConcept.class).where(f -> f.matchAll()).fetchAllHits(); + assertEquals(3, termConcepts.size()); + }); + + myResourceReindexingSvc.markAllResourcesForReindexing(); + myResourceReindexingSvc.forceReindexingPass(); + myTerminologyDeferredStorageSvc.saveAllDeferred(); + + runInTransaction(() -> { + assertEquals(3L, myTermConceptDao.count()); + + SearchSession session = Search.session(myEntityManager); + List termConcepts = session.search(TermConcept.class).where(f -> f.matchAll()).fetchAllHits(); + assertEquals(3, termConcepts.size()); + }); } @Test @@ -3948,7 +3985,7 @@ public class FhirResourceDaoR4Test extends BaseJpaR4Test { String uuid = UUID.randomUUID().toString(); runInTransaction(() -> { - Search search = new Search(); + ca.uhn.fhir.jpa.entity.Search search = new ca.uhn.fhir.jpa.entity.Search(); SearchCoordinatorSvcImpl.populateSearchEntity(map, "Encounter", uuid, normalized, search, RequestPartitionId.allPartitions()); search.setStatus(SearchStatusEnum.FAILED); search.setFailureCode(500); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java index 85784c49eb6..14283fa83d7 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/BaseJpaR5Test.java @@ -45,6 +45,8 @@ import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetConceptDesignationDao; import ca.uhn.fhir.jpa.dao.data.ITermValueSetDao; import ca.uhn.fhir.jpa.dao.dstu2.FhirResourceDaoDstu2SearchNoFtTest; +import ca.uhn.fhir.jpa.entity.TermValueSet; +import ca.uhn.fhir.jpa.entity.TermValueSetConcept; import ca.uhn.fhir.jpa.interceptor.PerformanceTracingLoggingInterceptor; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.model.entity.ResourceIndexedSearchParamString; @@ -58,6 +60,7 @@ import ca.uhn.fhir.jpa.searchparam.registry.SearchParamRegistryImpl; import ca.uhn.fhir.jpa.subscription.match.registry.SubscriptionRegistry; import ca.uhn.fhir.jpa.term.BaseTermReadSvcImpl; import ca.uhn.fhir.jpa.term.TermDeferredStorageSvcImpl; +import ca.uhn.fhir.jpa.term.ValueSetExpansionR4Test; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermDeferredStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvcR5; @@ -73,8 +76,8 @@ import ca.uhn.fhir.util.UrlUtil; import ca.uhn.fhir.validation.FhirValidator; import ca.uhn.fhir.validation.ValidationResult; import org.apache.commons.io.IOUtils; -import org.hibernate.search.jpa.FullTextEntityManager; -import org.hibernate.search.jpa.Search; +import org.hibernate.search.mapper.orm.Search; +import org.hibernate.search.mapper.orm.session.SearchSession; import org.hl7.fhir.common.hapi.validation.validator.FhirInstanceValidator; import 
org.hl7.fhir.instance.model.api.IBaseResource; import org.hl7.fhir.r5.model.AllergyIntolerance; @@ -137,6 +140,7 @@ import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.context.ApplicationContext; +import org.springframework.data.domain.Pageable; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.test.util.AopTestUtils; @@ -149,7 +153,11 @@ import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.util.List; import java.util.Map; +import java.util.Optional; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.mock; @@ -454,10 +462,10 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { @BeforeEach public void beforeFlushFT() { runInTransaction(() -> { - FullTextEntityManager ftem = Search.getFullTextEntityManager(myEntityManager); - ftem.purgeAll(ResourceTable.class); - ftem.purgeAll(ResourceIndexedSearchParamString.class); - ftem.flushToIndexes(); + SearchSession searchSession = Search.session(myEntityManager); + searchSession.workspace(ResourceTable.class).purge(); +// searchSession.workspace(ResourceIndexedSearchParamString.class).purge(); + searchSession.indexingPlan().execute(); }); myDaoConfig.setSchedulingDisabled(true); @@ -626,5 +634,40 @@ public abstract class BaseJpaR5Test extends BaseJpaTest { String[] uuidParams = params.get(Constants.PARAM_PAGINGACTION); return uuidParams[0]; } + protected ValueSet.ValueSetExpansionContainsComponent assertExpandedValueSetContainsConcept(ValueSet theValueSet, String theSystem, String theCode, String theDisplay, Integer theDesignationCount) { + List contains = theValueSet.getExpansion().getContains(); + + Stream stream = contains.stream(); + if (theSystem != null) { + stream = stream.filter(concept -> theSystem.equalsIgnoreCase(concept.getSystem())); + } + if (theCode != null ) { + stream = stream.filter(concept -> theCode.equalsIgnoreCase(concept.getCode())); + } + if (theDisplay != null){ + stream = stream.filter(concept -> theDisplay.equalsIgnoreCase(concept.getDisplay())); + } + if (theDesignationCount != null) { + stream = stream.filter(concept -> concept.getDesignation().size() == theDesignationCount); + } + + Optional first = stream.findFirst(); + if (!first.isPresent()) { + String failureMessage = String.format("Expanded ValueSet %s did not contain concept [%s|%s|%s] with [%d] designations", theValueSet.getId(), theSystem, theCode, theDisplay, theDesignationCount); + fail(failureMessage); + return null; + } else { + return first.get(); + } + } + public List getExpandedConceptsByValueSetUrl(String theValuesetUrl) { + return runInTransaction(() -> { + List valueSets = myTermValueSetDao.findTermValueSetByUrl(Pageable.unpaged(), theValuesetUrl); + assertEquals(1, valueSets.size()); + TermValueSet valueSet = valueSets.get(0); + List concepts = valueSet.getConcepts(); + return concepts.stream().map(concept -> concept.getCode()).collect(Collectors.toList()); + }); + } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5CodeSystemTest.java index 
6e7e00a44c7..e7bcd8dd016 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5CodeSystemTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/dao/r5/FhirResourceDaoR5CodeSystemTest.java @@ -43,7 +43,7 @@ public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test { // Now the background scheduler will do its thing myTermDeferredStorageSvc.saveDeferred(); runInTransaction(() -> { - assertEquals(1, myTermCodeSystemDao.count()); + assertEquals(0, myTermCodeSystemDao.count()); assertEquals(0, myTermCodeSystemVersionDao.count()); assertEquals(0, myTermConceptDao.count()); }); @@ -137,7 +137,7 @@ public class FhirResourceDaoR5CodeSystemTest extends BaseJpaR5Test { // The remaining versions and Code System entities should be gone now. runInTransaction(() -> { - assertEquals(1, myTermCodeSystemDao.count()); + assertEquals(0, myTermCodeSystemDao.count()); assertNull(myTermCodeSystemDao.findByCodeSystemUri("http://foo")); assertEquals(0, myTermCodeSystemVersionDao.count()); List resourceList = myResourceTableDao.findAll(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java index e38bcd3c903..d1dbf9d1f30 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/SystemProviderDstu2Test.java @@ -194,99 +194,6 @@ public class SystemProviderDstu2Test extends BaseJpaDstu2Test { } } - @Transactional(propagation = Propagation.NEVER) - @Test - public void testSuggestKeywords() throws Exception { - - Patient patient = new Patient(); - patient.addName().addFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); - - Observation obs = new Observation(); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - obs.getSubject().setReference(ptId); - IIdType obsId = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless(); - - obs = new Observation(); - obs.setId(obsId); - obs.getSubject().setReference(ptId); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - myObservationDao.update(obs, mySrd); - - // Try to wait for the indexing to complete - waitForSize(2, ()-> fetchSuggestionCount(ptId)); - - HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml"); - try (CloseableHttpResponse http = ourHttpClient.execute(get)) { - assertEquals(200, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - - Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output); - assertEquals(2, parameters.getParameter().size()); - assertEquals("keyword", parameters.getParameter().get(0).getPart().get(0).getName()); - assertEquals(new StringDt("ZXCVBNM"), parameters.getParameter().get(0).getPart().get(0).getValue()); - assertEquals("score", parameters.getParameter().get(0).getPart().get(1).getName()); - assertEquals(new DecimalDt("1.0"), parameters.getParameter().get(0).getPart().get(1).getValue()); - - } - } - - private Number fetchSuggestionCount(IIdType thePtId) throws IOException { - HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + 
thePtId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml"); - try (CloseableHttpResponse http = ourHttpClient.execute(get)) { - assertEquals(200, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output); - return parameters.getParameter().size(); - } - } - - @Test - public void testSuggestKeywordsInvalid() throws Exception { - Patient patient = new Patient(); - patient.addName().addFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); - - Observation obs = new Observation(); - obs.getSubject().setReference(ptId); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - myObservationDao.create(obs, mySrd); - - HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords"); - CloseableHttpResponse http = ourHttpClient.execute(get); - try { - assertEquals(400, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - assertThat(output, containsString("Parameter 'context' must be provided")); - } finally { - http.close(); - } - - get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything"); - http = ourHttpClient.execute(get); - try { - assertEquals(400, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - assertThat(output, containsString("Parameter 'searchParam' must be provided")); - } finally { - http.close(); - } - - get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=aa"); - http = ourHttpClient.execute(get); - try { - assertEquals(400, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - assertThat(output, containsString("Parameter 'text' must be provided")); - } finally { - http.close(); - } - - } @Test public void testGetOperationDefinition() { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3CodeSystemTest.java index 8dc57145104..7f8aa7a0fb5 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3CodeSystemTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3CodeSystemTest.java @@ -14,6 +14,7 @@ import org.junit.jupiter.api.Test; import org.springframework.transaction.annotation.Transactional; import java.io.IOException; +import java.util.stream.Collectors; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.fail; @@ -35,7 +36,11 @@ public class ResourceProviderDstu3CodeSystemTest extends BaseResourceProviderDst @Test public void testLookupOnExternalCode() { - ResourceProviderDstu3ValueSetTest.createExternalCs(myCodeSystemDao, myResourceTableDao, myTermCodeSystemStorageSvc, mySrd); + ResourceProviderDstu3ValueSetTest.createExternalCs(myCodeSystemDao, myResourceTableDao, myTermCodeSystemStorageSvc, mySrd, myCaptureQueriesListener); + + runInTransaction(()->{ + ourLog.info("Code system versions:\n 
* " + myTermCodeSystemVersionDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * "))); + }); Parameters respParam = ourClient .operation() diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java index 25cf1919a7a..9505b1ba7dd 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetTest.java @@ -7,6 +7,8 @@ import ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; +import ca.uhn.fhir.jpa.util.BaseCaptureQueriesListener; +import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.jpa.model.entity.ResourceTable; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; @@ -949,6 +951,11 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 public static CodeSystem createExternalCs(IFhirResourceDao theCodeSystemDao, IResourceTableDao theResourceTableDao, ITermCodeSystemStorageSvc theTermCodeSystemStorageSvc, ServletRequestDetails theRequestDetails) { + return createExternalCs(theCodeSystemDao, theResourceTableDao, theTermCodeSystemStorageSvc, theRequestDetails, null); + } + + @Nonnull + public static CodeSystem createExternalCs(IFhirResourceDao theCodeSystemDao, IResourceTableDao theResourceTableDao, ITermCodeSystemStorageSvc theTermCodeSystemStorageSvc, ServletRequestDetails theRequestDetails, CircularQueueCaptureQueriesListener theCaptureQueriesListener) { CodeSystem codeSystem = new CodeSystem(); codeSystem.setUrl(URL_MY_CODE_SYSTEM); codeSystem.setContent(CodeSystemContentMode.NOTPRESENT); @@ -979,7 +986,15 @@ public class ResourceProviderDstu3ValueSetTest extends BaseResourceProviderDstu3 TermConcept parentB = new TermConcept(cs, "ParentB").setDisplay("Parent B"); cs.getConcepts().add(parentB); + + ourLog.info("About to update CodeSystem"); + if (theCaptureQueriesListener != null) { + theCaptureQueriesListener.clear(); + } theTermCodeSystemStorageSvc.storeNewCodeSystemVersion(new ResourcePersistentId(table.getId()), URL_MY_CODE_SYSTEM, "SYSTEM NAME", "SYSTEM VERSION", cs, table); + if (theCaptureQueriesListener != null) { + theCaptureQueriesListener.logAllQueries(); + } return codeSystem; } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java index 78b47bec79e..8d174d880f6 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/dstu3/ResourceProviderDstu3ValueSetVersionedTest.java @@ -50,6 +50,8 @@ import static ca.uhn.fhir.jpa.dao.dstu3.FhirResourceDaoDstu3TerminologyTest.URL_ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsStringIgnoringCase; +import static org.hamcrest.Matchers.equalTo; 
+import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.stringContainsInOrder; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -1110,68 +1112,24 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); - - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); // ... 
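
The beforeFlushFT() hunk in BaseJpaR5Test near the top of this excerpt is the core Hibernate Search 5 to 6 conversion for the test fixtures: Search.getFullTextEntityManager()/purgeAll()/flushToIndexes() become a SearchSession workspace purge followed by an explicit indexing-plan execute. A minimal, self-contained sketch of the new call sequence, assuming an injected JPA EntityManager and an active transaction; everything except the Hibernate Search calls and the ResourceTable entity is illustrative.

import javax.persistence.EntityManager;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;

public class FullTextIndexCleaner {

   private final EntityManager myEntityManager;

   public FullTextIndexCleaner(EntityManager theEntityManager) {
      myEntityManager = theEntityManager;
   }

   /** Hibernate Search 6 equivalent of purgeAll()/flushToIndexes(); must run inside a transaction. */
   public void purgeResourceIndex() {
      SearchSession searchSession = Search.session(myEntityManager);
      searchSession.workspace(ResourceTable.class).purge();   // drop every indexed ResourceTable document
      searchSession.indexingPlan().execute();                 // apply the pending indexing work immediately
   }
}
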
- concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); }); } private void validateTermValueSetExpandedAndChildrenV2(String theValueSetName, CodeSystem theCodeSystem) { + runInTransaction(() -> { Optional optionalValueSetByResourcePid = myTermValueSetDao.findByResourcePid(myExtensionalVsIdOnResourceTable_v2); assertTrue(optionalValueSetByResourcePid.isPresent()); @@ -1187,64 +1145,22 @@ public class ResourceProviderDstu3ValueSetVersionedTest extends BaseResourceProv assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration v2", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration v2", 2); + assertThat(concept.getSystemVersion(), is(equalTo("2"))); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", 
designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); - - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter v2", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + TermValueSetConcept termValueSetConcept1 = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter v2", 0); + assertThat(termValueSetConcept1.getSystemVersion(), is(equalTo("2"))); // ... - concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum v2", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum v2", 1); + assertThat(otherConcept.getSystemVersion(), is(equalTo("2"))); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum v2", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + TermValueSetConcept termValueSetConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum v2", 0); + assertThat(termValueSetConcept.getSystemVersion(), is(equalTo("2"))); }); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java index e8706afa599..6e1a9329f26 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/BaseResourceProviderR4Test.java @@ -38,6 +38,7 @@ 
import org.hl7.fhir.r4.model.Patient; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeEach; +import org.slf4j.Logger; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.web.context.ContextLoader; import org.springframework.web.context.WebApplicationContext; @@ -53,8 +54,11 @@ import java.util.List; import java.util.concurrent.TimeUnit; import static org.apache.commons.lang3.StringUtils.isNotBlank; +import static org.slf4j.LoggerFactory.getLogger; public abstract class BaseResourceProviderR4Test extends BaseJpaR4Test { + private static final Logger ourLog = getLogger(BaseResourceProviderR4Test.class); + protected static IValidationSupport myValidationSupport; protected static CloseableHttpClient ourHttpClient; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java index b81c372fc87..3fbd8e3e15a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ConsentInterceptorResourceProviderR4Test.java @@ -58,7 +58,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; -import java.util.Optional; import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.leftPad; @@ -561,8 +560,8 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid assertEquals(1, response.getEntry().size()); assertNull(response.getTotalElement().getValue()); - runInTransaction(()->{ - Search search = mySearchEntityDao.findByUuidAndFetchIncludes(searchId).orElseThrow(()->new IllegalStateException()); + runInTransaction(() -> { + Search search = mySearchEntityDao.findByUuidAndFetchIncludes(searchId).orElseThrow(() -> new IllegalStateException()); assertEquals(3, search.getNumFound()); assertEquals(1, search.getNumBlocked()); assertEquals(2, search.getTotalCount()); @@ -577,7 +576,8 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid */ @Test public void testDefaultInterceptorAllowsAll() { - myConsentInterceptor = new ConsentInterceptor(new IConsentService() {}); + myConsentInterceptor = new ConsentInterceptor(new IConsentService() { + }); ourRestServer.getInterceptorService().registerInterceptor(myConsentInterceptor); myClient.create().resource(new Patient().setGender(Enumerations.AdministrativeGender.MALE).addName(new HumanName().setFamily("1"))).execute(); @@ -598,8 +598,8 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid // The paging should have ended now - but the last redacted female result is an empty existing page which should never have been there. 
assertNotNull(BundleUtil.getLinkUrlOfType(myFhirCtx, response, "next")); - runInTransaction(()->{ - Search search = mySearchEntityDao.findByUuidAndFetchIncludes(searchId).orElseThrow(()->new IllegalStateException()); + runInTransaction(() -> { + Search search = mySearchEntityDao.findByUuidAndFetchIncludes(searchId).orElseThrow(() -> new IllegalStateException()); assertEquals(3, search.getNumFound()); assertEquals(0, search.getNumBlocked()); assertEquals(3, search.getTotalCount()); @@ -611,7 +611,8 @@ public class ConsentInterceptorResourceProviderR4Test extends BaseResourceProvid */ @Test public void testDefaultInterceptorAllowsFailure() { - myConsentInterceptor = new ConsentInterceptor(new IConsentService() {}); + myConsentInterceptor = new ConsentInterceptor(new IConsentService() { + }); ourRestServer.getInterceptorService().registerInterceptor(myConsentInterceptor); myClient.create().resource(new Patient().setGender(Enumerations.AdministrativeGender.MALE).addName(new HumanName().setFamily("1"))).execute(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java index b66b1e613b5..b23d6b487f6 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemTest.java @@ -55,7 +55,9 @@ public class ResourceProviderR4CodeSystemTest extends BaseResourceProviderR4Test @Test public void testLookupOnExternalCode() { + myCaptureQueriesListener.clear(); ResourceProviderR4ValueSetNoVerCSNoVerTest.createExternalCs(myCodeSystemDao, myResourceTableDao, myTermCodeSystemStorageSvc, mySrd); + myCaptureQueriesListener.logAllQueriesForCurrentThread(); Parameters respParam = myClient .operation() diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemVersionedTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemVersionedTest.java index 2bbb894515d..bde81bbdc47 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemVersionedTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4CodeSystemVersionedTest.java @@ -791,7 +791,9 @@ public class ResourceProviderR4CodeSystemVersionedTest extends BaseResourceProvi String encoded = myFhirCtx.newJsonParser().encodeResourceToString(initialCodeSystem); HttpPut putRequest = new HttpPut(ourServerBase + "/CodeSystem/" + parentChildCs1Id); putRequest.setEntity(new StringEntity(encoded, ContentType.parse("application/json+fhir"))); + myCaptureQueriesListener.clear(); CloseableHttpResponse resp = ourHttpClient.execute(putRequest); + myCaptureQueriesListener.logAllQueries(); try { assertEquals(200, resp.getStatusLine().getStatusCode()); } finally { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java index 935c4a9048c..9a1794f65be 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetNoVerCSNoVerTest.java @@ -61,6 +61,7 @@ import static org.awaitility.Awaitility.await; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsStringIgnoringCase; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.stringContainsInOrder; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -540,9 +541,10 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv ValueSet expanded = (ValueSet) respParam.getParameter().get(0).getResource(); String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); + ourLog.info("zoop"); ourLog.info(resp); - assertThat(resp, containsStringIgnoringCase("")); + assertThat(resp, is(containsStringIgnoringCase(""))); } @Test @@ -916,60 +918,19 @@ public class ResourceProviderR4ValueSetNoVerCSNoVerTest extends BaseResourceProv assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); // ... 
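
These ValueSet pre-expansion hunks, like the Dstu3 and R4 ones before them, replace positional termValueSet.getConcepts().get(n) assertions with lookups by system, code, display and designation count, so the tests no longer hard-code a concept order after the Hibernate Search 6 migration. The real assertTermValueSetContainsConceptAndIsInDeclaredOrder and assertTermConceptContainsDesignation helpers are defined outside this excerpt; the sketch below shows the same lookup style using the stream filter from the assertExpandedValueSetContainsConcept helper added to BaseJpaR5Test, applied to TermValueSetConcept. The class name, method name and failure message are illustrative, not the helpers' real signatures.

import java.util.List;
import java.util.Optional;
import ca.uhn.fhir.jpa.entity.TermValueSet;
import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import static org.junit.jupiter.api.Assertions.fail;

public class TermValueSetAssertions {

   /** Find a concept by its coordinates instead of by list index; fail with a readable message if absent. */
   public static TermValueSetConcept findConcept(TermValueSet theValueSet, String theSystem, String theCode,
                                                  String theDisplay, int theDesignationCount) {
      List<TermValueSetConcept> concepts = theValueSet.getConcepts();
      Optional<TermValueSetConcept> match = concepts.stream()
         .filter(c -> theSystem.equalsIgnoreCase(c.getSystem()))
         .filter(c -> theCode.equalsIgnoreCase(c.getCode()))
         .filter(c -> theDisplay.equalsIgnoreCase(c.getDisplay()))
         .filter(c -> c.getDesignations().size() == theDesignationCount)
         .findFirst();
      if (!match.isPresent()) {
         fail(String.format("Expanded ValueSet did not contain concept [%s|%s|%s] with [%d] designations",
            theSystem, theCode, theDisplay, theDesignationCount));
         return null;
      }
      return match.get();
   }
}
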
- concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); }); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java index de4bbcbc275..e55fcbadf86 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSNoVerTest.java @@ -815,60 +815,19 @@ public class ResourceProviderR4ValueSetVerCSNoVerTest extends BaseResourceProvid assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + 
assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); // ... - concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); }); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java index 0d58fe9f55f..8c8708078d2 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/ResourceProviderR4ValueSetVerCSVerTest.java @@ -1,6 +1,5 @@ package ca.uhn.fhir.jpa.provider.r4; -import ca.uhn.fhir.context.support.ValueSetExpansionOptions; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao; import ca.uhn.fhir.jpa.dao.data.IResourceTableDao; @@ -9,14 +8,10 @@ import ca.uhn.fhir.jpa.entity.TermConcept; import ca.uhn.fhir.jpa.entity.TermConceptParentChildLink.RelationshipTypeEnum; 
import ca.uhn.fhir.jpa.entity.TermValueSet; import ca.uhn.fhir.jpa.entity.TermValueSetConcept; -import ca.uhn.fhir.jpa.entity.TermValueSetConceptDesignation; import ca.uhn.fhir.jpa.entity.TermValueSetPreExpansionStatusEnum; import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import ca.uhn.fhir.jpa.model.util.JpaConstants; -import ca.uhn.fhir.jpa.search.builder.SearchBuilder; import ca.uhn.fhir.jpa.term.api.ITermCodeSystemStorageSvc; import ca.uhn.fhir.jpa.term.api.ITermReadSvc; -import ca.uhn.fhir.jpa.term.custom.CustomTerminologySet; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; import ca.uhn.fhir.rest.server.exceptions.ResourceNotFoundException; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; @@ -41,7 +36,6 @@ import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.data.domain.PageRequest; -import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Slice; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; @@ -49,9 +43,7 @@ import org.springframework.transaction.support.TransactionTemplate; import javax.annotation.Nonnull; import java.io.IOException; -import java.util.List; import java.util.Optional; -import java.util.stream.Collectors; import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest.URL_MY_CODE_SYSTEM; import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest.URL_MY_VALUE_SET; @@ -59,6 +51,8 @@ import static org.awaitility.Awaitility.await; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.containsStringIgnoringCase; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.stringContainsInOrder; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -117,7 +111,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider theCodeSystem.setId("CodeSystem/cs2"); theCodeSystem.setVersion("2"); - for(CodeSystem.ConceptDefinitionComponent conceptDefinitionComponent : theCodeSystem.getConcept()) { + for (CodeSystem.ConceptDefinitionComponent conceptDefinitionComponent : theCodeSystem.getConcept()) { conceptDefinitionComponent.setDisplay(conceptDefinitionComponent.getDisplay() + " v2"); } new TransactionTemplate(myTxManager).execute(new TransactionCallbackWithoutResult() { @@ -940,7 +934,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider ourLog.info("Updated ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(updatedValueSet_v1)); String updatedValueSetName_v1 = valueSet_v1.getName(); - validateTermValueSetNotExpanded(updatedValueSetName_v1,"1", myExtensionalVsIdOnResourceTable_v1); + validateTermValueSetNotExpanded(updatedValueSetName_v1, "1", myExtensionalVsIdOnResourceTable_v1); ValueSet updatedValueSet_v2 = valueSet_v2; updatedValueSet_v2.setName(valueSet_v2.getName().concat(" - MODIFIED")); @@ -949,7 +943,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider ourLog.info("Updated ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(updatedValueSet_v2)); String updatedValueSetName_v2 = valueSet_v2.getName(); - 
validateTermValueSetNotExpanded(updatedValueSetName_v2,"2", myExtensionalVsIdOnResourceTable_v2); + validateTermValueSetNotExpanded(updatedValueSetName_v2, "2", myExtensionalVsIdOnResourceTable_v2); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); validateTermValueSetExpandedAndChildrenV1(updatedValueSetName_v1, codeSystem_v1); @@ -1066,64 +1060,19 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); - - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); // ... 
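
The versioned validations add one check that the shared lookup helper does not make: the concept it returns is asserted to carry the expected code-system version using the Hamcrest matchers imported in these hunks, as in assertThat(concept.getSystemVersion(), is(equalTo("2"))). A tiny illustrative wrapper for that pattern; the class and method names are stand-ins, only the getter and matcher usage come from the hunks.

import ca.uhn.fhir.jpa.entity.TermValueSetConcept;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;

public final class VersionedConceptChecks {

   private VersionedConceptChecks() {}

   /** Assert that a concept found by the lookup helper belongs to the expected code-system version. */
   public static TermValueSetConcept assertConceptVersion(TermValueSetConcept theConcept, String theExpectedVersion) {
      assertThat(theConcept.getSystemVersion(), is(equalTo(theExpectedVersion)));
      return theConcept;
   }
}
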
- concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); }); } @@ -1143,64 +1092,22 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration v2", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration v2", 2); + assertThat(concept.getSystemVersion(), is(equalTo("2"))); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); - - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - 
assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter v2", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + TermValueSetConcept termValueSetConcept1 = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter v2", 0); + assertThat(termValueSetConcept1.getSystemVersion(), is(equalTo("2"))); // ... - concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum v2", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum v2", 1); + assertThat(otherConcept.getSystemVersion(), is(equalTo("2"))); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum v2", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + TermValueSetConcept termValueSetConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum v2", 0); + assertThat(termValueSetConcept.getSystemVersion(), is(equalTo("2"))); }); } @@ -1747,7 +1654,7 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider private boolean clearDeferredStorageQueue() { - if(!myTerminologyDeferredStorageSvc.isStorageQueueEmpty()) { + if (!myTerminologyDeferredStorageSvc.isStorageQueueEmpty()) { myTerminologyDeferredStorageSvc.saveAllDeferred(); return false; } else { @@ -1762,8 +1669,8 @@ public class ResourceProviderR4ValueSetVerCSVerTest extends BaseResourceProvider try { persistLocalVs(createLocalVs(URL_MY_CODE_SYSTEM, "1")); fail(); - } catch (UnprocessableEntityException theE) { - assertThat(theE.getMessage(), containsString("Can not create multiple ValueSet resources with ValueSet.url \"" + URL_MY_VALUE_SET + "\" and ValueSet.version \"1\", already have one with 
resource ID: ")); + } catch (UnprocessableEntityException e) { + assertThat(e.getMessage(), containsString("Can not create multiple ValueSet resources with ValueSet.url \"" + URL_MY_VALUE_SET + "\" and ValueSet.version \"1\", already have one with resource ID: ")); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java index 7f48fde859d..0b7976947cb 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r4/SystemProviderR4Test.java @@ -305,104 +305,6 @@ public class SystemProviderR4Test extends BaseJpaR4Test { assertThat(http.getFirstHeader("Content-Type").getValue(), containsString("application/fhir+json")); } - - @Transactional(propagation = Propagation.NEVER) - @Test - public void testSuggestKeywords() throws Exception { - - Patient patient = new Patient(); - patient.addName().setFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, mySrd).getId().toUnqualifiedVersionless(); - - Observation obs = new Observation(); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - obs.getSubject().setReferenceElement(ptId); - IIdType obsId = myObservationDao.create(obs, mySrd).getId().toUnqualifiedVersionless(); - - obs = new Observation(); - obs.setId(obsId); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - myObservationDao.update(obs, mySrd); - - // Try to wait for the indexing to complete - waitForSize(2, () -> fetchSuggestionCount(ptId)); - - HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml"); - CloseableHttpResponse http = ourHttpClient.execute(get); - try { - assertEquals(200, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - - Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output); - assertEquals(2, parameters.getParameter().size()); - assertEquals("keyword", parameters.getParameter().get(0).getPart().get(0).getName()); - assertEquals(("ZXCVBNM"), ((StringType) parameters.getParameter().get(0).getPart().get(0).getValue()).getValueAsString()); - assertEquals("score", parameters.getParameter().get(0).getPart().get(1).getName()); - assertEquals(("1.0"), ((DecimalType) parameters.getParameter().get(0).getPart().get(1).getValue()).getValueAsString()); - - } finally { - http.close(); - } - } - - private Number fetchSuggestionCount(IIdType thePtId) throws IOException { - HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + thePtId.getIdPart() + "/$everything&searchParam=_content&text=zxc&_pretty=true&_format=xml"); - try (CloseableHttpResponse http = ourHttpClient.execute(get)) { - assertEquals(200, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - Parameters parameters = ourCtx.newXmlParser().parseResource(Parameters.class, output); - return parameters.getParameter().size(); - } - } - - @Test - public void testSuggestKeywordsInvalid() throws Exception { - Patient patient = new Patient(); - patient.addName().setFamily("testSuggest"); - IIdType ptId = myPatientDao.create(patient, 
mySrd).getId().toUnqualifiedVersionless(); - - Observation obs = new Observation(); - obs.getSubject().setReferenceElement(ptId); - obs.getCode().setText("ZXCVBNM ASDFGHJKL QWERTYUIOPASDFGHJKL"); - myObservationDao.create(obs, mySrd); - - HttpGet get = new HttpGet(ourServerBase + "/$suggest-keywords"); - CloseableHttpResponse http = ourHttpClient.execute(get); - try { - assertEquals(400, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - assertThat(output, containsString("Parameter 'context' must be provided")); - } finally { - http.close(); - } - - get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything"); - http = ourHttpClient.execute(get); - try { - assertEquals(400, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - assertThat(output, containsString("Parameter 'searchParam' must be provided")); - } finally { - http.close(); - } - - get = new HttpGet(ourServerBase + "/$suggest-keywords?context=Patient/" + ptId.getIdPart() + "/$everything&searchParam=aa"); - http = ourHttpClient.execute(get); - try { - assertEquals(400, http.getStatusLine().getStatusCode()); - String output = IOUtils.toString(http.getEntity().getContent(), StandardCharsets.UTF_8); - ourLog.info(output); - assertThat(output, containsString("Parameter 'text' must be provided")); - } finally { - http.close(); - } - - } - @Test public void testTransactionCount() throws Exception { for (int i = 0; i < 20; i++) { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java index 70607cc7b9a..142d5bf93ba 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetTest.java @@ -47,15 +47,20 @@ import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallbackWithoutResult; import org.springframework.transaction.support.TransactionTemplate; +import javax.annotation.Nonnull; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.util.List; import java.util.Optional; +import java.util.stream.Collectors; import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest.URL_MY_CODE_SYSTEM; import static ca.uhn.fhir.jpa.dao.r4.FhirResourceDaoR4TerminologyTest.URL_MY_VALUE_SET; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.containsStringIgnoringCase; +import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.stringContainsInOrder; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -293,6 +298,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { loadAndPersistCodeSystemAndValueSet(HTTPVerb.POST); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConceptsByValueSetUrl = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); Parameters respParam = myClient .operation() 
.onInstance(myExtensionalVsId) @@ -304,6 +310,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { String resp = myFhirCtx.newXmlParser().setPrettyPrint(true).encodeResourceToString(expanded); ourLog.info(resp); + assertEquals(24, expanded.getExpansion().getTotal()); assertEquals(1, expanded.getExpansion().getOffset()); assertEquals("offset", expanded.getExpansion().getParameter().get(0).getName()); @@ -311,13 +318,14 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1000, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(23, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContains().get(0).getSystem()); - assertEquals("11378-7", expanded.getExpansion().getContains().get(0).getCode()); - assertEquals("Systolic blood pressure at First encounter", expanded.getExpansion().getContains().get(0).getDisplay()); - assertEquals("http://acme.org", expanded.getExpansion().getContains().get(1).getSystem()); - assertEquals("8493-9", expanded.getExpansion().getContains().get(1).getCode()); - assertEquals("Systolic blood pressure 10 hour minimum", expanded.getExpansion().getContains().get(1).getDisplay()); + + assertThat(toCodes(expanded), is(equalTo(expandedConceptsByValueSetUrl.subList(1,24)))); + } + + @Nonnull + public List toCodes(ValueSet theExpandedValueSet) { + return theExpandedValueSet.getExpansion().getContains().stream().map(t -> t.getCode()).collect(Collectors.toList()); } @Test @@ -326,7 +334,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { loadAndPersistCodeSystemAndValueSet(HTTPVerb.POST); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); - + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); Parameters respParam = myClient .operation() .onInstance(myExtensionalVsId) @@ -345,9 +353,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(1, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContainsFirstRep().getSystem()); - assertEquals("8450-9", expanded.getExpansion().getContainsFirstRep().getCode()); - assertEquals("Systolic blood pressure--expiration", expanded.getExpansion().getContainsFirstRep().getDisplay()); + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(0,1)))); } @@ -358,6 +364,8 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { loadAndPersistCodeSystemAndValueSet(HTTPVerb.POST); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); + Parameters respParam = myClient .operation() .onInstance(myExtensionalVsId) @@ -377,10 +385,8 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(1, 
expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContainsFirstRep().getSystem()); - assertEquals("11378-7", expanded.getExpansion().getContainsFirstRep().getCode()); - assertEquals("Systolic blood pressure at First encounter", expanded.getExpansion().getContainsFirstRep().getDisplay()); + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(1,2)))); } @Test @@ -512,6 +518,8 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { loadAndPersistCodeSystemAndValueSet(HTTPVerb.POST); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); + Parameters respParam = myClient .operation() .onType(ValueSet.class) @@ -531,13 +539,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1000, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(23, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContains().get(0).getSystem()); - assertEquals("11378-7", expanded.getExpansion().getContains().get(0).getCode()); - assertEquals("Systolic blood pressure at First encounter", expanded.getExpansion().getContains().get(0).getDisplay()); - assertEquals("http://acme.org", expanded.getExpansion().getContains().get(1).getSystem()); - assertEquals("8493-9", expanded.getExpansion().getContains().get(1).getCode()); - assertEquals("Systolic blood pressure 10 hour minimum", expanded.getExpansion().getContains().get(1).getDisplay()); - + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(1,24)))); } @Test @@ -546,6 +548,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { loadAndPersistCodeSystemAndValueSet(HTTPVerb.POST); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); Parameters respParam = myClient .operation() @@ -566,10 +569,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(1, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContainsFirstRep().getSystem()); - assertEquals("8450-9", expanded.getExpansion().getContainsFirstRep().getCode()); - assertEquals("Systolic blood pressure--expiration", expanded.getExpansion().getContainsFirstRep().getDisplay()); - + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(0,1)))); } @Test @@ -579,6 +579,8 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { loadAndPersistCodeSystemAndValueSet(HTTPVerb.POST); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); + Parameters respParam = myClient .operation() .onType(ValueSet.class) @@ -599,9 +601,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", 
expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(1, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContainsFirstRep().getSystem()); - assertEquals("11378-7", expanded.getExpansion().getContainsFirstRep().getCode()); - assertEquals("Systolic blood pressure at First encounter", expanded.getExpansion().getContainsFirstRep().getDisplay()); + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(1,2)))); } @@ -681,6 +681,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-3-vs.xml"); + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); Parameters respParam = myClient .operation() .onType(ValueSet.class) @@ -700,12 +701,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1000, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(23, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContains().get(0).getSystem()); - assertEquals("11378-7", expanded.getExpansion().getContains().get(0).getCode()); - assertEquals("Systolic blood pressure at First encounter", expanded.getExpansion().getContains().get(0).getDisplay()); - assertEquals("http://acme.org", expanded.getExpansion().getContains().get(1).getSystem()); - assertEquals("8493-9", expanded.getExpansion().getContains().get(1).getCode()); - assertEquals("Systolic blood pressure 10 hour minimum", expanded.getExpansion().getContains().get(1).getDisplay()); + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(1,24)))); } @@ -718,6 +714,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-3-vs.xml"); + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); Parameters respParam = myClient .operation() .onType(ValueSet.class) @@ -737,9 +734,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(1, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContainsFirstRep().getSystem()); - assertEquals("8450-9", expanded.getExpansion().getContainsFirstRep().getCode()); - assertEquals("Systolic blood pressure--expiration", expanded.getExpansion().getContainsFirstRep().getDisplay()); + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(0,1)))); } @@ -752,6 +747,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { ValueSet toExpand = loadResourceFromClasspath(ValueSet.class, "/extensional-case-3-vs.xml"); + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); Parameters respParam = myClient .operation() .onType(ValueSet.class) @@ 
-772,10 +768,7 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertEquals("count", expanded.getExpansion().getParameter().get(1).getName()); assertEquals(1, expanded.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(1, expanded.getExpansion().getContains().size()); - assertEquals("http://acme.org", expanded.getExpansion().getContainsFirstRep().getSystem()); - assertEquals("11378-7", expanded.getExpansion().getContainsFirstRep().getCode()); - assertEquals("Systolic blood pressure at First encounter", expanded.getExpansion().getContainsFirstRep().getDisplay()); - + assertThat(toCodes(expanded), is(equalTo(expandedConcepts.subList(1,2)))); } @Test @@ -1141,67 +1134,17 @@ public class ResourceProviderR5ValueSetTest extends BaseResourceProviderR5Test { assertTrue(optionalValueSetByUrl.isPresent()); TermValueSet termValueSet = optionalValueSetByUrl.get(); - assertSame(optionalValueSetByResourcePid.get(), termValueSet); - ourLog.info("ValueSet:\n" + termValueSet.toString()); - assertEquals("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2", termValueSet.getUrl()); - assertEquals(theValueSetName, termValueSet.getName()); - assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); - assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - concept = 
termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); - - // ... - - concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); - - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); }); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java index 36ed4a694bf..aaff4459182 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/provider/r5/ResourceProviderR5ValueSetVersionedTest.java @@ -1097,64 +1097,18 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - 
assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); - - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); // ... - concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("1", concept.getSystemVersion()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); }); } @@ -1174,64 +1128,18 @@ public class ResourceProviderR5ValueSetVersionedTest extends BaseResourceProvide assertEquals(theCodeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration v2", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, 
concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration v2", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); - - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter v2", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter v2", 0); // ... 
- concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum v2", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum v2", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("2", concept.getSystemVersion()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum v2", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum v2", 0); }); } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderMySqlTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderMySqlTest.java index a040ea1d3da..3998c78a51d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderMySqlTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderMySqlTest.java @@ -2,7 +2,7 @@ package ca.uhn.fhir.jpa.search.builder.sql; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.config.HibernateDialectProvider; +import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.builder.predicate.BaseJoiningPredicateBuilder; @@ -25,18 +25,18 @@ public class SearchQueryBuilderMySqlTest { @Mock private SqlObjectFactory mySqlObjectFactory; @Mock - private HibernateDialectProvider myHibernateDialectProvider; + private HibernatePropertiesProvider myHibernatePropertiesProvider; private final FhirContext myFhirContext = FhirContext.forR4(); @BeforeEach public void beforeInitMocks() { MockitoAnnotations.initMocks(this); - when(myHibernateDialectProvider.getDialect()).thenReturn(new org.hibernate.dialect.MySQL57Dialect()); + when(myHibernatePropertiesProvider.getDialect()).thenReturn(new org.hibernate.dialect.MySQL57Dialect()); } private SearchQueryBuilder createSearchQueryBuilder() { - return new SearchQueryBuilder(myFhirContext, new ModelConfig(), new PartitionSettings(), RequestPartitionId.allPartitions(), "Patient", mySqlObjectFactory, myHibernateDialectProvider, false); + return new 
SearchQueryBuilder(myFhirContext, new ModelConfig(), new PartitionSettings(), RequestPartitionId.allPartitions(), "Patient", mySqlObjectFactory, myHibernatePropertiesProvider, false); } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java index 8ae2e33d00d..7e535aa8c6a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/builder/sql/SearchQueryBuilderTest.java @@ -3,7 +3,7 @@ package ca.uhn.fhir.jpa.search.builder.sql; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; import ca.uhn.fhir.interceptor.model.RequestPartitionId; -import ca.uhn.fhir.jpa.config.HibernateDialectProvider; +import ca.uhn.fhir.jpa.config.HibernatePropertiesProvider; import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.builder.predicate.ResourceTablePredicateBuilder; @@ -52,7 +52,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeSqlServer2005_NoSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServer2005Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -78,7 +78,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeSqlServer2005_WithSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServer2005Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -109,7 +109,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeSqlServer2012_NoSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServer2012Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -135,7 +135,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeSqlServer2012_WithSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new SQLServer2012Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -165,7 +165,7 @@ public class 
SearchQueryBuilderTest { @Test public void testRangePostgreSQL95_NoSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new PostgreSQL95Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -191,7 +191,7 @@ public class SearchQueryBuilderTest { @Test public void testRangePostgreSQL95_WithSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new PostgreSQL95Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -218,7 +218,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeOracle12c_NoSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new Oracle12cDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -244,7 +244,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeOracle12c_WithSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new Oracle12cDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -268,11 +268,10 @@ public class SearchQueryBuilderTest { } - @Test public void testRangeMySQL8_NoSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new MySQL8Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -298,7 +297,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeMySQL8_WithSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new MySQL8Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -329,7 +328,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeMariaDB103_NoSort() { - 
HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new MariaDB103Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -355,7 +354,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeMariaDB103_WithSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new MariaDB103Dialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -386,7 +385,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeDerbyTenSeven_NoSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new DerbyTenSevenDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); @@ -412,7 +411,7 @@ public class SearchQueryBuilderTest { @Test public void testRangeDerbyTenSeven_WithSort() { - HibernateDialectProvider dialectProvider = new HibernateDialectProvider(); + HibernatePropertiesProvider dialectProvider = new HibernatePropertiesProvider(); dialectProvider.setDialectForUnitTest(new DerbyTenSevenDialect()); SearchQueryBuilder builder = new SearchQueryBuilder(myFhirContext, myModelConfig, myPartitionSettings, myRequestPartitionId, "Patient", mySqlBuilderFactory, dialectProvider, false); builder.addResourceIdsPredicate(Lists.newArrayList(500L, 501L)); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcMultipleObservationsIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcMultipleObservationsIT.java index 7c7e0d09b81..29a0c5bc70c 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcMultipleObservationsIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcMultipleObservationsIT.java @@ -2,7 +2,8 @@ package ca.uhn.fhir.jpa.search.lastn; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; -import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchConfig; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; +import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper; import ca.uhn.fhir.jpa.search.lastn.json.CodeJson; import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -26,9 +27,19 @@ import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; +import org.junit.runner.RunWith; +import org.mockito.Mock; +import org.mockito.junit.MockitoJUnitRunner; 
+import org.mockito.junit.jupiter.MockitoExtension; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; +import org.testcontainers.elasticsearch.ElasticsearchContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; import java.io.IOException; import java.util.ArrayList; @@ -47,7 +58,7 @@ import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; @ExtendWith(SpringExtension.class) -@ContextConfiguration(classes = {TestElasticsearchConfig.class}) +@Testcontainers public class LastNElasticsearchSvcMultipleObservationsIT { static private final Calendar baseObservationDate = new GregorianCalendar(); @@ -58,11 +69,18 @@ public class LastNElasticsearchSvcMultipleObservationsIT { private final Map>> createdPatientObservationMap = new HashMap<>(); private final FhirContext myFhirContext = FhirContext.forCached(FhirVersionEnum.R4); - @Autowired + + @Container + public static ElasticsearchContainer elasticsearchContainer = TestElasticsearchContainerHelper.getEmbeddedElasticSearch(); + private ElasticsearchSvcImpl elasticsearchSvc; @BeforeEach public void before() throws IOException { + PartitionSettings partitionSettings = new PartitionSettings(); + partitionSettings.setPartitioningEnabled(false); + elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", ""); + if (!indexLoaded) { createMultiplePatientsAndObservations(); indexLoaded = true; diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcSingleObservationIT.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcSingleObservationIT.java index aba1356d20a..3313c1ee111 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcSingleObservationIT.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/LastNElasticsearchSvcSingleObservationIT.java @@ -2,8 +2,9 @@ package ca.uhn.fhir.jpa.search.lastn; import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.FhirVersionEnum; +import ca.uhn.fhir.jpa.model.config.PartitionSettings; import ca.uhn.fhir.jpa.model.util.CodeSystemHash; -import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchConfig; +import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchContainerHelper; import ca.uhn.fhir.jpa.search.lastn.json.CodeJson; import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson; import ca.uhn.fhir.jpa.searchparam.SearchParameterMap; @@ -22,23 +23,31 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; import org.springframework.test.context.ContextConfiguration; import 
org.springframework.test.context.junit.jupiter.SpringExtension; +import org.testcontainers.elasticsearch.ElasticsearchContainer; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.junit.jupiter.Testcontainers; import java.io.IOException; +import java.time.Duration; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.UUID; +import static java.time.temporal.ChronoUnit.SECONDS; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; @ExtendWith(SpringExtension.class) -@ContextConfiguration(classes = {TestElasticsearchConfig.class}) +@Testcontainers public class LastNElasticsearchSvcSingleObservationIT { static ObjectMapper ourMapperNonPrettyPrint; @@ -75,9 +84,20 @@ public class LastNElasticsearchSvcSingleObservationIT { final String CODEFIRSTCODINGCODE = "test-code"; final String CODEFIRSTCODINGDISPLAY = "test-code display"; final FhirContext myFhirContext = FhirContext.forCached(FhirVersionEnum.R4); - @Autowired + ElasticsearchSvcImpl elasticsearchSvc; + @Container + public static ElasticsearchContainer elasticsearchContainer = TestElasticsearchContainerHelper.getEmbeddedElasticSearch(); + + + @BeforeEach + public void before() { + PartitionSettings partitionSettings = new PartitionSettings(); + partitionSettings.setPartitioningEnabled(false); + elasticsearchSvc = new ElasticsearchSvcImpl(partitionSettings, elasticsearchContainer.getHost(), elasticsearchContainer.getMappedPort(9200), "", ""); + } + @AfterEach public void after() throws IOException { elasticsearchSvc.deleteAllDocumentsForTest(ElasticsearchSvcImpl.OBSERVATION_INDEX); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchConfig.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchConfig.java deleted file mode 100644 index b1a489a3fba..00000000000 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchConfig.java +++ /dev/null @@ -1,62 +0,0 @@ -package ca.uhn.fhir.jpa.search.lastn.config; - -import ca.uhn.fhir.context.ConfigurationException; -import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic; -import pl.allegro.tech.embeddedelasticsearch.PopularProperties; - -import javax.annotation.PreDestroy; -import java.io.IOException; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -@Configuration -public class TestElasticsearchConfig { - - private final String elasticsearchHost = "localhost"; - private final String elasticsearchUserId = ""; - private final String elasticsearchPassword = ""; - - private static final String ELASTIC_VERSION = "6.5.4"; - - @Bean - public PartitionSettings partitionSettings() { - return new PartitionSettings(); - } - - - @Bean() - public ElasticsearchSvcImpl elasticsearchSvc() { - int elasticsearchPort = embeddedElasticSearch().getHttpPort(); - return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword); - } - - @Bean - public EmbeddedElastic embeddedElasticSearch() { - EmbeddedElastic embeddedElastic; - try { - embeddedElastic = EmbeddedElastic.builder() - .withElasticVersion(ELASTIC_VERSION) - 
.withSetting(PopularProperties.TRANSPORT_TCP_PORT, 0) - .withSetting(PopularProperties.HTTP_PORT, 0) - .withSetting(PopularProperties.CLUSTER_NAME, UUID.randomUUID()) - .withStartTimeout(60, TimeUnit.SECONDS) - .build() - .start(); - } catch (IOException | InterruptedException e) { - throw new ConfigurationException(e); - } - - return embeddedElastic; - } - - @PreDestroy - public void stop() throws IOException { - elasticsearchSvc().close(); - embeddedElasticSearch().stop(); - } - -} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java new file mode 100644 index 00000000000..570d9565fec --- /dev/null +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/search/lastn/config/TestElasticsearchContainerHelper.java @@ -0,0 +1,20 @@ +package ca.uhn.fhir.jpa.search.lastn.config; + +import org.testcontainers.elasticsearch.ElasticsearchContainer; + +import java.time.Duration; + +import static java.time.temporal.ChronoUnit.SECONDS; + +public class TestElasticsearchContainerHelper { + + + public static final String ELASTICSEARCH_VERSION = "7.10.0"; + public static final String ELASTICSEARCH_IMAGE = "docker.elastic.co/elasticsearch/elasticsearch:" + ELASTICSEARCH_VERSION; + + public static ElasticsearchContainer getEmbeddedElasticSearch() { + return new ElasticsearchContainer(ELASTICSEARCH_IMAGE) + .withStartupTimeout(Duration.of(300, SECONDS)); + } + +} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java index e8562ad45d4..358424a040d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/SubscriptionTestUtil.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.subscription; import ca.uhn.fhir.jpa.api.config.DaoConfig; +import ca.uhn.fhir.jpa.cache.IResourceChangeListenerCacheRefresher; import ca.uhn.fhir.jpa.subscription.channel.impl.LinkedBlockingChannel; import ca.uhn.fhir.jpa.subscription.submit.interceptor.SubscriptionSubmitInterceptorLoader; import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription; @@ -29,6 +30,8 @@ public class SubscriptionTestUtil { private SubscriptionRegistry mySubscriptionRegistry; @Autowired private SubscriptionChannelRegistry mySubscriptionChannelRegistry; + @Autowired + private IResourceChangeListenerCacheRefresher myResourceChangeListenerCacheRefresher; public int getExecutorQueueSize() { LinkedBlockingChannel channel = mySubscriptionMatcherInterceptor.getProcessingChannelForUnitTest(); @@ -46,6 +49,8 @@ public class SubscriptionTestUtil { ourLog.info("Executor work queue has {} items", getExecutorQueueSize()); } Thread.sleep(100); + + myResourceChangeListenerCacheRefresher.refreshExpiredCachesAndNotifyListeners(); } public void registerEmailInterceptor() { diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java index 7e5c726baca..81770c06681 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/subscription/email/EmailSubscriptionDstu2Test.java @@ -2,6 +2,7 @@ package ca.uhn.fhir.jpa.subscription.email; import ca.uhn.fhir.jpa.provider.BaseResourceProviderDstu2Test; import ca.uhn.fhir.jpa.subscription.SubscriptionTestUtil; +import ca.uhn.fhir.jpa.subscription.match.registry.ActiveSubscription; import ca.uhn.fhir.model.dstu2.composite.CodeableConceptDt; import ca.uhn.fhir.model.dstu2.composite.CodingDt; import ca.uhn.fhir.model.dstu2.resource.Observation; @@ -13,8 +14,12 @@ import ca.uhn.fhir.rest.api.MethodOutcome; import com.icegreen.greenmail.util.GreenMail; import com.icegreen.greenmail.util.GreenMailUtil; import com.icegreen.greenmail.util.ServerSetup; +import org.hamcrest.Matchers; import org.hl7.fhir.instance.model.api.IIdType; -import org.junit.jupiter.api.*; import static org.hamcrest.MatcherAssert.assertThat; +import org.junit.jupiter.api.*; + +import static org.awaitility.Awaitility.await; +import static org.hamcrest.MatcherAssert.assertThat; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -115,6 +120,7 @@ public class EmailSubscriptionDstu2Test extends BaseResourceProviderDstu2Test { Subscription subscription1 = createSubscription(criteria1, payload, "to1@example.com,to2@example.com"); mySubscriptionTestUtil.waitForQueueToDrain(); + await().until(()->mySubscriptionRegistry.get(subscription1.getIdElement().getIdPart()), Matchers.not(Matchers.nullValue())); mySubscriptionTestUtil.setEmailSender(subscription1.getIdElement()); assertEquals(0, Arrays.asList(ourTestSmtp.getReceivedMessages()).size()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincJpaTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincJpaTest.java index 4d822fb9d6a..4a56a8934a9 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincJpaTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologyLoaderSvcLoincJpaTest.java @@ -4,11 +4,13 @@ import ca.uhn.fhir.jpa.dao.r4.BaseJpaR4Test; import ca.uhn.fhir.jpa.entity.TermCodeSystem; import ca.uhn.fhir.jpa.entity.TermCodeSystemVersion; import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import java.io.IOException; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.fail; public class TerminologyLoaderSvcLoincJpaTest extends BaseJpaR4Test { private TermLoaderSvcImpl mySvc; @@ -29,7 +31,10 @@ public class TerminologyLoaderSvcLoincJpaTest extends BaseJpaR4Test { // Load LOINC marked as version 2.67 TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesWithPropertiesFileToZip(myFiles, "v267_loincupload.properties"); + + // FIXME: maybe add a count queries test? 
mySvc.loadLoinc(myFiles.getFiles(), mySrd); + myTerminologyDeferredStorageSvc.saveAllDeferred(); runInTransaction(() -> { @@ -53,6 +58,7 @@ public class TerminologyLoaderSvcLoincJpaTest extends BaseJpaR4Test { // Update LOINC marked as version 2.67 myFiles = new ZipCollectionBuilder(); TerminologyLoaderSvcLoincTest.addLoincMandatoryFilesWithPropertiesFileToZip(myFiles, "v267_loincupload.properties"); + mySvc.loadLoinc(myFiles.getFiles(), mySrd); myTerminologyDeferredStorageSvc.saveAllDeferred(); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java index 0ee4228d449..a03bed19c8b 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcDeltaR4Test.java @@ -558,20 +558,27 @@ public class TerminologySvcDeltaR4Test extends BaseJpaR4Test { assertEquals(true, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAAA").isPresent())); // Remove CodeA + runInTransaction(()->{ + ourLog.info("About to remove CodeA. Have codes:\n * {}", myTermConceptDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * "))); + }); + + myCaptureQueriesListener.clear(); runInTransaction(()->{ CustomTerminologySet delta2 = new CustomTerminologySet(); delta2.addRootConcept("codeA"); myTermCodeSystemStorageSvc.applyDeltaCodeSystemsRemove("http://foo/cs", delta2); }); - myCaptureQueriesListener.logAllQueriesForCurrentThread(); + myCaptureQueriesListener.logAllQueries(); - ourLog.info("*** Done removing"); + runInTransaction(()->{ + ourLog.info("Done removing. Have codes:\n * {}", myTermConceptDao.findAll().stream().map(t->t.toString()).collect(Collectors.joining("\n * "))); + }); assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeB").isPresent())); assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeA").isPresent())); - assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAA").isPresent())); - assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAAA").isPresent())); + assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAA").isPresent())); //TODO GGG JA this assert fails. If you swap to `deleteByPid` it does not fail. + assertEquals(false, runInTransaction(() -> myTermSvc.findCode("http://foo/cs", "codeAAA").isPresent()));//And I assume this one does too. 
} diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java index 332d7e42f2f..5d29b99a1d4 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplDstu3Test.java @@ -145,10 +145,10 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test { TermCodeSystemVersion cs = new TermCodeSystemVersion(); cs.setResource(table); - TermConcept code1 = new TermConcept(cs, "50015-7"); - TermConcept code2 = new TermConcept(cs, "43343-3"); - TermConcept code3 = new TermConcept(cs, "43343-4"); - TermConcept code4 = new TermConcept(cs, "47239-9"); + TermConcept code1 = new TermConcept(cs, "50015-7"); // has -3 as a child + TermConcept code2 = new TermConcept(cs, "43343-3"); // has -4 as a child + TermConcept code3 = new TermConcept(cs, "43343-4"); //has no children + TermConcept code4 = new TermConcept(cs, "47239-9"); //has no children code1.addPropertyString("SYSTEM", "Bld/Bone mar^Donor"); code1.addPropertyCoding( @@ -1147,7 +1147,8 @@ public class TerminologySvcImplDstu3Test extends BaseJpaDstu3Test { .setValue("50015-7,43343-3,43343-4,47239-9"); outcome = myTermSvc.expandValueSet(null, vs); codes = toCodesContains(outcome.getExpansion().getContains()); - assertThat(codes, containsInAnyOrder("43343-4", "47239-9")); + + assertThat(codes.toString(), codes, containsInAnyOrder("43343-4", "47239-9")); } @Test diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java index 5fe2a780584..507b397ed7a 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/TerminologySvcImplR4Test.java @@ -2003,10 +2003,11 @@ public class TerminologySvcImplR4Test extends BaseTermR4Test { codeSystem.setUrl(CS_URL_2); IIdType id_v2 = myCodeSystemDao.update(codeSystem, mySrd).getId().toUnqualified(); + myTerminologyDeferredStorageSvc.saveAllDeferred(); runInTransaction(() -> { List termCodeSystemVersions_updated = myTermCodeSystemVersionDao.findAll(); - assertEquals(termCodeSystemVersions_updated.size(), 1); + assertEquals(1, termCodeSystemVersions_updated.size()); TermCodeSystemVersion termCodeSystemVersion_2 = termCodeSystemVersions_updated.get(0); assertEquals(termCodeSystemVersion_2.getConcepts().size(), 2); Set termConcepts_updated = new HashSet<>(termCodeSystemVersion_2.getConcepts()); diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java index 83ce64c910c..67d04ee1b6c 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/term/ValueSetExpansionR4Test.java @@ -38,8 +38,11 @@ import java.io.IOException; import java.util.List; import java.util.Optional; import java.util.stream.Collectors; +import java.util.stream.Stream; import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.MatcherAssert.assertThat; import static 
org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -180,7 +183,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { ValueSet expandedValueSet = myTermSvc.expandValueSet(new ValueSetExpansionOptions(), input); ourLog.debug("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); - assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), contains( + assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), containsInAnyOrder( "code9", "code90", "code91", "code92", "code93", "code94", "code95", "code96", "code97", "code98", "code99" )); assertEquals(11, expandedValueSet.getExpansion().getContains().size(), toCodes(expandedValueSet).toString()); @@ -190,7 +193,6 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { List selectQueries = myCaptureQueriesListener.getSelectQueries(); String lastSelectQuery = selectQueries.get(selectQueries.size() - 1).getSql(true, true).toLowerCase(); assertThat(lastSelectQuery, containsString(" like '%display value 9%'")); - } @Test @@ -214,7 +216,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { myCaptureQueriesListener.clear(); ValueSet expandedValueSet = myTermSvc.expandValueSet(new ValueSetExpansionOptions(), input); List codes = expandedValueSet.getExpansion().getContains().stream().map(t -> t.getCode()).collect(Collectors.toList()); - assertThat(codes.toString(), codes, contains("code100", "code1000", "code1001", "code1002", "code1003", "code1004")); + assertThat(codes.toString(), codes, containsInAnyOrder("code100", "code1000", "code1001", "code1002", "code1003", "code1004")); // Make sure we used the pre-expanded version List selectQueries = myCaptureQueriesListener.getSelectQueries(); @@ -228,7 +230,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { myCaptureQueriesListener.clear(); ValueSet expandedValueSet = myValueSetDao.expand(vsId, "display value 100", 0, 1000, mySrd); List codes = expandedValueSet.getExpansion().getContains().stream().map(t -> t.getCode()).collect(Collectors.toList()); - assertThat(codes.toString(), codes, contains("code100", "code1000", "code1001", "code1002", "code1003", "code1004")); + assertThat(codes.toString(), codes, containsInAnyOrder("code100", "code1000", "code1001", "code1002", "code1003", "code1004")); // Make sure we used the pre-expanded version List selectQueries = myCaptureQueriesListener.getSelectQueries(); @@ -240,10 +242,12 @@ } @Test public void testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatch_SelectRange() { myDaoConfig.setPreExpandValueSets(true); create100ConceptsCodeSystemAndValueSet(); + List expandedConceptCodes = getExpandedConceptsByValueSetUrl("http://foo/vs"); + ValueSet input = new ValueSet(); input.getCompose() .addInclude() @@ -253,16 +257,22 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { .setOp(ValueSet.FilterOperator.EQUAL) .setValue("display value 9"); + int offset = 3; + int count = 4; myCaptureQueriesListener.clear(); ValueSetExpansionOptions expansionOptions = new ValueSetExpansionOptions() - .setOffset(3) - .setCount(4); + .setOffset(offset) + .setCount(count); + + ValueSet expandedValueSet =
myTermSvc.expandValueSet(expansionOptions, input); ourLog.debug("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); - assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), contains( - "code92", "code93", "code94", "code95" - )); + //Take our initial expanded list, and only keep the elements that are relevant. + expandedConceptCodes.removeIf(concept -> !concept.startsWith("code9")); + + //Ensure that the subsequent expansion with offset returns the same slice we are anticipating. + assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), is(equalTo(expandedConceptCodes.subList(offset, offset + count)))); assertEquals(4, expandedValueSet.getExpansion().getContains().size(), toCodes(expandedValueSet).toString()); assertEquals(11, expandedValueSet.getExpansion().getTotal()); @@ -270,10 +280,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { List selectQueries = myCaptureQueriesListener.getSelectQueries(); String lastSelectQuery = selectQueries.get(selectQueries.size() - 1).getSql(true, true).toLowerCase(); assertThat(lastSelectQuery, containsString(" like '%display value 9%'")); - } - @Test public void testExpandInline_IncludePreExpandedValueSetByUri_FilterOnDisplay_LeftMatchCaseInsensitive() { myDaoConfig.setPreExpandValueSets(true); @@ -326,7 +334,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { ValueSet expandedValueSet = myTermSvc.expandValueSet(new ValueSetExpansionOptions(), input); ourLog.debug("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); - assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), contains( + assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), containsInAnyOrder( "code9", "code91", "code92", "code93", "code94", "code95", "code96", "code97", "code98", "code99" )); assertEquals(10, expandedValueSet.getExpansion().getContains().size(), toCodes(expandedValueSet).toString()); @@ -425,7 +433,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { ValueSet expandedValueSet = myTermSvc.expandValueSet(new ValueSetExpansionOptions(), input); ourLog.debug("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); - assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), contains( + assertThat(toCodes(expandedValueSet).toString(), toCodes(expandedValueSet), containsInAnyOrder( "code9", "code90", "code91", "code92", "code93", "code94", "code95", "code96", "code97", "code98", "code99" )); assertEquals(11, expandedValueSet.getExpansion().getContains().size(), toCodes(expandedValueSet).toString()); @@ -474,52 +482,16 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size()); - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8450-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); - assertEquals(2, containsComponent.getDesignation().size()); + ValueSet.ValueSetExpansionContainsComponent concept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + 
assertConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); - designationComponent = containsComponent.getDesignation().get(1); - assertEquals("sv", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); + ValueSet.ValueSetExpansionContainsComponent otherConcept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... 
- - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(23); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8492-1", containsComponent.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); + assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); } @Test @@ -565,108 +537,39 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size()); - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8450-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); - assertEquals(2, containsComponent.getDesignation().size()); + ValueSet.ValueSetExpansionContainsComponent concept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + assertConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); + assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); + ValueSet.ValueSetExpansionContainsComponent otherConcept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); + assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); - designationComponent = containsComponent.getDesignation().get(1); - assertEquals("sv", 
designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); + ValueSet reexpandedValueSet = myTermSvc.expandValueSet(null, valueSet); + ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(reexpandedValueSet)); - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); + assertEquals(codeSystem.getConcept().size(), reexpandedValueSet.getExpansion().getTotal()); + assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffset(), reexpandedValueSet.getExpansion().getOffset()); + assertEquals(0, reexpandedValueSet.getExpansion().getParameter().size()); + assertEquals(codeSystem.getConcept().size(), reexpandedValueSet.getExpansion().getContains().size()); - // ... + concept = assertExpandedValueSetContainsConcept(reexpandedValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); + assertExpandedValueSetContainsConcept(reexpandedValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); + otherConcept = assertExpandedValueSetContainsConcept(reexpandedValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); + //Ensure they are streamed back in the same order. 
+ List firstExpansionCodes = reexpandedValueSet.getExpansion().getContains().stream().map(cn -> cn.getCode()).collect(Collectors.toList()); + List secondExpansionCodes = expandedValueSet.getExpansion().getContains().stream().map(cn -> cn.getCode()).collect(Collectors.toList()); + assertThat(firstExpansionCodes, is(equalTo(secondExpansionCodes))); - designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(23); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8492-1", containsComponent.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - expandedValueSet = myTermSvc.expandValueSet(null, valueSet); - ourLog.info("Expanded ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(expandedValueSet)); - - assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getTotal()); - assertEquals(myDaoConfig.getPreExpandValueSetsDefaultOffset(), expandedValueSet.getExpansion().getOffset()); - assertEquals(0, expandedValueSet.getExpansion().getParameter().size()); - - assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8450-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); - assertEquals(2, containsComponent.getDesignation().size()); - - designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); - - designationComponent = containsComponent.getDesignation().get(1); - assertEquals("sv", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... 
- - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(23); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8492-1", containsComponent.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); + //Ensure that internally the designations are expanded back in the same order. + List firstExpansionDesignationValues = reexpandedValueSet.getExpansion().getContains().stream().flatMap(cn -> cn.getDesignation().stream()).map(desig -> desig.getValue()).collect(Collectors.toList()); + List secondExpansionDesignationValues = expandedValueSet.getExpansion().getContains().stream().flatMap(cn -> cn.getDesignation().stream()).map(desig -> desig.getValue()).collect(Collectors.toList()); + assertThat(firstExpansionDesignationValues, is(equalTo(secondExpansionDesignationValues))); } @Test @@ -692,52 +595,20 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(codeSystem.getConcept().size(), expandedValueSet.getExpansion().getContains().size()); - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8450-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); - assertEquals(2, containsComponent.getDesignation().size()); + ValueSet.ValueSetExpansionContainsComponent concept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + assertConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - designationComponent = containsComponent.getDesignation().get(1); - assertEquals("sv", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", 
designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); + assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); // ... - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); + ValueSet.ValueSetExpansionContainsComponent otherConcept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); - designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + assertConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - containsComponent = expandedValueSet.getExpansion().getContains().get(23); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8492-1", containsComponent.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); + assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); } @Test @@ -754,6 +625,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConceptCodes = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); + ValueSetExpansionOptions options = new ValueSetExpansionOptions() .setOffset(0) .setCount(23); @@ -769,47 +642,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(23, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(23, expandedValueSet.getExpansion().getContains().size()); - - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8450-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); - assertEquals(2, containsComponent.getDesignation().size()); - - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - 
assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); - - designationComponent = containsComponent.getDesignation().get(1); - assertEquals("sv", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... - - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + //It is enough to test that the sublist returned is the correct one. 
+ assertThat(toCodes(expandedValueSet), is(equalTo(expandedConceptCodes.subList(0, 23)))); } @Test @@ -842,46 +676,15 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(23, expandedValueSet.getExpansion().getContains().size()); - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8450-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure--expiration", containsComponent.getDisplay()); - assertEquals(2, containsComponent.getDesignation().size()); + ValueSet.ValueSetExpansionContainsComponent concept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designationComponent.getValue()); + assertConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - designationComponent = containsComponent.getDesignation().get(1); - assertEquals("sv", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designationComponent.getValue()); + assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... 
- - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + ValueSet.ValueSetExpansionContainsComponent otherConcept = assertExpandedValueSetContainsConcept(expandedValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); } @Test @@ -960,7 +763,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); - + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); ValueSetExpansionOptions options = new ValueSetExpansionOptions() .setOffset(1) .setCount(1000); @@ -976,39 +779,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), expandedValueSet.getExpansion().getContains().size()); - - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8493-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... 
- - containsComponent = expandedValueSet.getExpansion().getContains().get(21); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8492-1", containsComponent.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); + assertThat(toCodes(expandedValueSet), is(equalTo(expandedConcepts.subList(1,expandedConcepts.size())))); } @Test @@ -1024,7 +795,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { ourLog.info("ValueSet:\n" + myFhirCtx.newJsonParser().setPrettyPrint(true).encodeResourceToString(valueSet)); myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); - + List expandedConcepts = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); ValueSetExpansionOptions options = new ValueSetExpansionOptions() .setOffset(1) .setCount(1000); @@ -1040,39 +811,7 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(1000, expandedValueSet.getExpansion().getParameter().get(1).getValueIntegerType().getValue().intValue()); assertEquals(codeSystem.getConcept().size() - expandedValueSet.getExpansion().getOffset(), expandedValueSet.getExpansion().getContains().size()); - - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8493-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... 
- - containsComponent = expandedValueSet.getExpansion().getContains().get(21); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(22); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8492-1", containsComponent.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); + assertThat(toCodes(expandedValueSet), is(equalTo(expandedConcepts.subList(1,expandedConcepts.size())))); } @Test @@ -1089,6 +828,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConceptCodes = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); + ValueSetExpansionOptions options = new ValueSetExpansionOptions() .setOffset(1) .setCount(22); @@ -1105,32 +846,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(22, expandedValueSet.getExpansion().getContains().size()); - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8493-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... - - containsComponent = expandedValueSet.getExpansion().getContains().get(21); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + //It is enough to test that the sublist returned is the correct one. 
+ assertThat(toCodes(expandedValueSet), is(equalTo(expandedConceptCodes.subList(1, 23)))); } @Test @@ -1162,6 +879,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { myTermSvc.preExpandDeferredValueSetsToTerminologyTables(); + List expandedConceptCodes = getExpandedConceptsByValueSetUrl("http://www.healthintersections.com.au/fhir/ValueSet/extensional-case-2"); + ValueSetExpansionOptions options = new ValueSetExpansionOptions() .setOffset(1) .setCount(22); @@ -1178,32 +897,8 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(22, expandedValueSet.getExpansion().getContains().size()); - ValueSet.ValueSetExpansionContainsComponent containsComponent = expandedValueSet.getExpansion().getContains().get(0); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("11378-7", containsComponent.getCode()); - assertEquals("Systolic blood pressure at First encounter", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - containsComponent = expandedValueSet.getExpansion().getContains().get(1); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8493-9", containsComponent.getCode()); - assertEquals("Systolic blood pressure 10 hour minimum", containsComponent.getDisplay()); - assertFalse(containsComponent.hasDesignation()); - - // ... - - containsComponent = expandedValueSet.getExpansion().getContains().get(21); - assertEquals("http://acme.org", containsComponent.getSystem()); - assertEquals("8491-3", containsComponent.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", containsComponent.getDisplay()); - assertEquals(1, containsComponent.getDesignation().size()); - - ValueSet.ConceptReferenceDesignationComponent designationComponent = containsComponent.getDesignation().get(0); - assertEquals("nl", designationComponent.getLanguage()); - assertEquals("http://snomed.info/sct", designationComponent.getUse().getSystem()); - assertEquals("900000000000013009", designationComponent.getUse().getCode()); - assertEquals("Synonym", designationComponent.getUse().getDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designationComponent.getValue()); + //It is enough to test that the sublist returned is the correct one. 
+ assertThat(toCodes(expandedValueSet), is(equalTo(expandedConceptCodes.subList(1, 23)))); } @Test @@ -1533,60 +1228,19 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); - // ... 
+ assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); }); } @@ -1635,60 +1289,16 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(codeSystem.getConcept().size(), termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); + TermValueSetConcept otherConcept = 
assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); - - // ... - - concept = termValueSet.getConcepts().get(22); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(22, concept.getOrder()); - - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(23); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(23, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); }); } @@ -1737,60 +1347,18 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); + TermValueSetConcept concept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - designation = concept.getDesignations().get(1); - assertEquals("sv", 
designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); - - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); // ... + TermValueSetConcept otherConcept = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(otherConcept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - concept = termValueSet.getConcepts().get(20); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(20, concept.getOrder()); - - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(21); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(21, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); }); } @@ -1839,61 +1407,24 @@ public class ValueSetExpansionR4Test extends BaseTermR4Test { assertEquals(codeSystem.getConcept().size() - 2, termValueSet.getConcepts().size()); assertEquals(TermValueSetPreExpansionStatusEnum.EXPANDED, termValueSet.getExpansionStatus()); - TermValueSetConcept concept = termValueSet.getConcepts().get(0); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8450-9", concept.getCode()); - assertEquals("Systolic blood pressure--expiration", concept.getDisplay()); - assertEquals(2, concept.getDesignations().size()); - assertEquals(0, concept.getOrder()); - TermValueSetConceptDesignation designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk - expiratie", designation.getValue()); + TermValueSetConcept concept = 
assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8450-9", "Systolic blood pressure--expiration", 2); + assertEquals(termValueSet.getConcepts().indexOf(concept), concept.getOrder()); - designation = concept.getDesignations().get(1); - assertEquals("sv", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systoliskt blodtryck - utgång", designation.getValue()); + assertTermConceptContainsDesignation(concept, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk - expiratie"); + assertTermConceptContainsDesignation(concept, "sv", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systoliskt blodtryck - utgång"); - concept = termValueSet.getConcepts().get(1); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("11378-7", concept.getCode()); - assertEquals("Systolic blood pressure at First encounter", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(1, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "11378-7", "Systolic blood pressure at First encounter", 0); - // ... + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); - concept = termValueSet.getConcepts().get(20); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8491-3", concept.getCode()); - assertEquals("Systolic blood pressure 1 hour minimum", concept.getDisplay()); - assertEquals(1, concept.getDesignations().size()); - assertEquals(20, concept.getOrder()); + TermValueSetConcept concept2 = assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8491-3", "Systolic blood pressure 1 hour minimum", 1); + assertTermConceptContainsDesignation(concept2, "nl", "http://snomed.info/sct", "900000000000013009", "Synonym", "Systolische bloeddruk minimaal 1 uur"); - designation = concept.getDesignations().get(0); - assertEquals("nl", designation.getLanguage()); - assertEquals("http://snomed.info/sct", designation.getUseSystem()); - assertEquals("900000000000013009", designation.getUseCode()); - assertEquals("Synonym", designation.getUseDisplay()); - assertEquals("Systolische bloeddruk minimaal 1 uur", designation.getValue()); - - concept = termValueSet.getConcepts().get(21); - ourLog.info("Concept:\n" + concept.toString()); - assertEquals("http://acme.org", concept.getSystem()); - assertEquals("8492-1", concept.getCode()); - assertEquals("Systolic blood pressure 8 hour minimum", concept.getDisplay()); - assertEquals(0, concept.getDesignations().size()); - assertEquals(21, concept.getOrder()); + assertTermValueSetContainsConceptAndIsInDeclaredOrder(termValueSet, "http://acme.org", "8492-1", "Systolic blood pressure 8 hour minimum", 0); }); } + + } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/CoordCalculatorTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/CoordCalculatorTest.java index 794c53ed917..153518a895d 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/CoordCalculatorTest.java +++ 
b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/CoordCalculatorTest.java @@ -1,6 +1,7 @@ package ca.uhn.fhir.jpa.util; -import org.hibernate.search.spatial.impl.Point; +import org.hibernate.search.engine.spatial.GeoBoundingBox; +import org.hibernate.search.engine.spatial.GeoPoint; import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -8,7 +9,6 @@ import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; public class CoordCalculatorTest { - private final Logger ourLog = LoggerFactory.getLogger(CoordCalculatorTest.class); // CHIN and UHN coordinates from Google Maps // Distance and bearing from https://www.movable-type.co.uk/scripts/latlong.html public static final double LATITUDE_CHIN = 43.65513; @@ -26,45 +26,45 @@ public class CoordCalculatorTest { @Test public void testCHINToUHN() { - Point result = CoordCalculator.findTarget(LATITUDE_CHIN, LONGITUDE_CHIN, BEARING_CHIN_TO_UHN, DISTANCE_KM_CHIN_TO_UHN); + GeoPoint result = CoordCalculator.findTarget(LATITUDE_CHIN, LONGITUDE_CHIN, BEARING_CHIN_TO_UHN, DISTANCE_KM_CHIN_TO_UHN); - assertEquals(LATITUDE_UHN, result.getLatitude(), 0.0001); - assertEquals(LONGITUDE_UHN, result.getLongitude(), 0.0001); + assertEquals(LATITUDE_UHN, result.latitude(), 0.0001); + assertEquals(LONGITUDE_UHN, result.longitude(), 0.0001); } @Test public void testBox() { - SearchBox box = CoordCalculator.getBox(LATITUDE_CHIN, LONGITUDE_CHIN, 1.0); + GeoBoundingBox box = CoordCalculator.getBox(LATITUDE_CHIN, LONGITUDE_CHIN, 1.0); double expectedLatitudeDelta = 0.0090; - assertEquals(LATITUDE_CHIN - expectedLatitudeDelta, box.getSouthWest().getLatitude(), 0.0001); - assertEquals(LATITUDE_CHIN + expectedLatitudeDelta, box.getNorthEast().getLatitude(), 0.0001); + assertEquals(LATITUDE_CHIN - expectedLatitudeDelta, box.bottomRight().latitude(), 0.0001); + assertEquals(LATITUDE_CHIN + expectedLatitudeDelta, box.topLeft().latitude(), 0.0001); double expectedLongitudeDelta = 0.012414; - assertEquals(LONGITUDE_CHIN - expectedLongitudeDelta, box.getSouthWest().getLongitude(), 0.0001); - assertEquals(LONGITUDE_CHIN + expectedLongitudeDelta, box.getNorthEast().getLongitude(), 0.0001); + assertEquals(LONGITUDE_CHIN - expectedLongitudeDelta, box.topLeft().longitude(), 0.0001); + assertEquals(LONGITUDE_CHIN + expectedLongitudeDelta, box.bottomRight().longitude(), 0.0001); } @Test public void testOnPrimeMeridian() { double meridianLongitide = 0.0; - SearchBox box = CoordCalculator.getBox(LATITUDE_CHIN, meridianLongitide, 1.0); + GeoBoundingBox box = CoordCalculator.getBox(LATITUDE_CHIN, meridianLongitide, 1.0); double expectedLatitudeDelta = 0.0090; - assertEquals(LATITUDE_CHIN - expectedLatitudeDelta, box.getSouthWest().getLatitude(), 0.0001); - assertEquals(LATITUDE_CHIN + expectedLatitudeDelta, box.getNorthEast().getLatitude(), 0.0001); + assertEquals(LATITUDE_CHIN - expectedLatitudeDelta, box.bottomRight().latitude(), 0.0001); + assertEquals(LATITUDE_CHIN + expectedLatitudeDelta, box.topLeft().latitude(), 0.0001); double expectedLongitudeDelta = 0.012414; - assertEquals(meridianLongitide - expectedLongitudeDelta, box.getSouthWest().getLongitude(), 0.0001); - assertEquals(meridianLongitide + expectedLongitudeDelta, box.getNorthEast().getLongitude(), 0.0001); + assertEquals(meridianLongitide - expectedLongitudeDelta, box.topLeft().longitude(), 0.0001); + assertEquals(meridianLongitide + expectedLongitudeDelta, box.bottomRight().longitude(), 0.0001); } @Test public void testOnAntiMeridian() { - 
SearchBox box = CoordCalculator.getBox(LATITUDE_TAVEUNI, LONGITIDE_TAVEUNI, 100.0); + GeoBoundingBox box = CoordCalculator.getBox(LATITUDE_TAVEUNI, LONGITIDE_TAVEUNI, 100.0); double expectedLatitudeDelta = 0.90; - assertEquals(LATITUDE_TAVEUNI - expectedLatitudeDelta, box.getSouthWest().getLatitude(), 0.01); - assertEquals(LATITUDE_TAVEUNI + expectedLatitudeDelta, box.getNorthEast().getLatitude(), 0.01); + assertEquals(LATITUDE_TAVEUNI - expectedLatitudeDelta, box.bottomRight().latitude(), 0.01); + assertEquals(LATITUDE_TAVEUNI + expectedLatitudeDelta, box.topLeft().latitude(), 0.01); double expectedLongitudeDelta = 0.94; - assertEquals(LONGITIDE_TAVEUNI - expectedLongitudeDelta, box.getSouthWest().getLongitude(), 0.01); + assertEquals(LONGITIDE_TAVEUNI - expectedLongitudeDelta, box.topLeft().longitude(), 0.01); // This case wraps - assertEquals(LONGITIDE_TAVEUNI + expectedLongitudeDelta - 360.0, box.getNorthEast().getLongitude(), 0.01); + assertEquals(LONGITIDE_TAVEUNI + expectedLongitudeDelta - 360.0, box.bottomRight().longitude(), 0.01); } } diff --git a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/JpaClasspathTest.java b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/JpaClasspathTest.java index ae5c19e828b..5e1c612d7e7 100644 --- a/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/JpaClasspathTest.java +++ b/hapi-fhir-jpaserver-base/src/test/java/ca/uhn/fhir/jpa/util/JpaClasspathTest.java @@ -1,5 +1,6 @@ package ca.uhn.fhir.jpa.util; +import org.junit.jupiter.api.Disabled; import org.junit.jupiter.api.Test; import static org.junit.jupiter.api.Assertions.fail; @@ -14,8 +15,8 @@ public class JpaClasspathTest { public void testNoLog4jOnClasspath() { try { - Class.forName("org.apache.logging.log4j.status.StatusLogger"); - fail("org.apache.logging.log4j.status.StatusLogger" + " found on classpath - Make sure log4j isn't being introduced"); + Class.forName("org.apache.logging.log4j.core.appender"); + fail("org.apache.logging.log4j.core.appender" + " found on classpath - Make sure log4j isn't being introduced"); } catch (ClassNotFoundException e) { // good } diff --git a/hapi-fhir-jpaserver-base/src/test/resources/logback-test.xml b/hapi-fhir-jpaserver-base/src/test/resources/logback-test.xml index 3b70abd6d9f..fa50db77690 100644 --- a/hapi-fhir-jpaserver-base/src/test/resources/logback-test.xml +++ b/hapi-fhir-jpaserver-base/src/test/resources/logback-test.xml @@ -19,7 +19,22 @@ - + + + + + + + + j + + + + + + + + @@ -31,6 +46,11 @@ + + + + + diff --git a/hapi-fhir-jpaserver-batch/pom.xml b/hapi-fhir-jpaserver-batch/pom.xml index ca476e5bbb5..b4b09a90306 100644 --- a/hapi-fhir-jpaserver-batch/pom.xml +++ b/hapi-fhir-jpaserver-batch/pom.xml @@ -45,7 +45,7 @@ org.hamcrest - java-hamcrest + hamcrest test diff --git a/hapi-fhir-jpaserver-mdm/pom.xml b/hapi-fhir-jpaserver-mdm/pom.xml index 87f1069fbdd..33619cfad67 100644 --- a/hapi-fhir-jpaserver-mdm/pom.xml +++ b/hapi-fhir-jpaserver-mdm/pom.xml @@ -28,7 +28,7 @@ org.hamcrest - java-hamcrest + hamcrest test diff --git a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java index 7e4a709c43c..0573e95ffaf 100644 --- a/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java +++ b/hapi-fhir-jpaserver-mdm/src/main/java/ca/uhn/fhir/jpa/mdm/dao/MdmLinkDaoSvc.java @@ -68,7 +68,7 @@ public class MdmLinkDaoSvc { mdmLink.setLinkSource(theLinkSource); 
mdmLink.setMatchResult(theMatchOutcome.getMatchResultEnum()); // Preserve these flags for link updates - mdmLink.setEidMatch(theMatchOutcome.isEidMatch() | mdmLink.isEidMatch()); + mdmLink.setEidMatch(theMatchOutcome.isEidMatch() | mdmLink.isEidMatchPresent()); mdmLink.setHadToCreateNewGoldenResource(theMatchOutcome.isCreatedNewResource() | mdmLink.getHadToCreateNewGoldenResource()); mdmLink.setMdmSourceType(myFhirContext.getResourceType(theSourceResource)); if (mdmLink.getScore() != null) { diff --git a/hapi-fhir-jpaserver-model/pom.xml b/hapi-fhir-jpaserver-model/pom.xml index 093bef8516c..f64bd911a98 100644 --- a/hapi-fhir-jpaserver-model/pom.xml +++ b/hapi-fhir-jpaserver-model/pom.xml @@ -79,8 +79,12 @@ - org.hibernate - hibernate-search-orm + org.hibernate.search + hibernate-search-mapper-orm + + + org.hibernate.search + hibernate-search-backend-elasticsearch diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java index c7f3ee4c343..b1105f62388 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/cross/IBasePersistedResource.java @@ -29,10 +29,8 @@ public interface IBasePersistedResource { IIdType getIdDt(); - boolean isDeleted(); - /** - * If the resource is deleted, returns the date/time that the resource was deleted at. Otherwie, returns null + * If the resource is deleted, returns the date/time that the resource was deleted at. Otherwise, returns null */ Date getDeleted(); diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java index af90902d6eb..4e376cc6be4 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseHasResource.java @@ -103,10 +103,7 @@ public abstract class BaseHasResource extends BasePartitionable implements IBase @Override public abstract Long getId(); - @Override - public boolean isDeleted() { - return myDeleted != null; - } + public void setDeleted(Date theDate) { myDeleted = theDate; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java index 17d5334e841..cd87fffc3c4 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/BaseResourceIndexedSearchParam.java @@ -32,8 +32,8 @@ import com.google.common.hash.HashCode; import com.google.common.hash.HashFunction; import com.google.common.hash.Hasher; import com.google.common.hash.Hashing; -import org.hibernate.search.annotations.ContainedIn; -import org.hibernate.search.annotations.Field; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField; import javax.annotation.Nullable; import javax.persistence.Column; @@ -60,27 +60,26 @@ public abstract class BaseResourceIndexedSearchParam extends BaseResourceIndex { private static final byte[] DELIMITER_BYTES = 
"|".getBytes(Charsets.UTF_8); private static final long serialVersionUID = 1L; - @Field() + @GenericField @Column(name = "SP_MISSING", nullable = false) private boolean myMissing = false; - @Field + @FullTextField @Column(name = "SP_NAME", length = MAX_SP_NAME, nullable = false) private String myParamName; @ManyToOne(optional = false, fetch = FetchType.LAZY, cascade = {}) @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", nullable = false) - @ContainedIn private ResourceTable myResource; @Column(name = "RES_ID", insertable = false, updatable = false, nullable = false) private Long myResourcePid; - @Field() + @FullTextField @Column(name = "RES_TYPE", updatable = false, nullable = false, length = Constants.MAX_RESOURCE_NAME_LENGTH) private String myResourceType; - @Field() + @GenericField @Column(name = "SP_UPDATED", nullable = true) // TODO: make this false after HAPI 2.3 @Temporal(TemporalType.TIMESTAMP) private Date myUpdated; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java index f6aebb78065..b81b79366c8 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamCoords.java @@ -26,7 +26,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.annotations.Field; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import javax.persistence.Column; import javax.persistence.Embeddable; @@ -51,10 +51,10 @@ public class ResourceIndexedSearchParamCoords extends BaseResourceIndexedSearchP private static final long serialVersionUID = 1L; @Column(name = "SP_LATITUDE") - @Field + //@FullTextField public double myLatitude; @Column(name = "SP_LONGITUDE") - @Field + //@FullTextField public double myLongitude; @Id @SequenceGenerator(name = "SEQ_SPIDX_COORDS", sequenceName = "SEQ_SPIDX_COORDS") diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java index 32cd3ba8e50..70c1579cb92 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamDate.java @@ -32,7 +32,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.annotations.Field; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import org.hl7.fhir.r4.model.DateTimeType; import javax.persistence.Column; @@ -63,13 +63,15 @@ import java.util.Date; public class ResourceIndexedSearchParamDate extends BaseResourceIndexedSearchParam { private static final long serialVersionUID = 1L; + @Column(name = "SP_VALUE_HIGH", nullable = true) @Temporal(TemporalType.TIMESTAMP) - @Field + @FullTextField public Date myValueHigh; + 
@Column(name = "SP_VALUE_LOW", nullable = true) @Temporal(TemporalType.TIMESTAMP) - @Field + @FullTextField public Date myValueLow; /** diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java index 57157873029..b9d3c047f80 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamNumber.java @@ -21,16 +21,13 @@ package ca.uhn.fhir.jpa.model.entity; */ import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.util.BigDecimalNumericFieldBridge; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.param.NumberParam; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.annotations.Field; -import org.hibernate.search.annotations.FieldBridge; -import org.hibernate.search.annotations.NumericField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField; import javax.persistence.Column; import javax.persistence.Embeddable; @@ -56,10 +53,9 @@ public class ResourceIndexedSearchParamNumber extends BaseResourceIndexedSearchP private static final long serialVersionUID = 1L; @Column(name = "SP_VALUE", nullable = true) - @Field - @NumericField - @FieldBridge(impl = BigDecimalNumericFieldBridge.class) + @ScaledNumberField public BigDecimal myValue; + @Id @SequenceGenerator(name = "SEQ_SPIDX_NUMBER", sequenceName = "SEQ_SPIDX_NUMBER") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_NUMBER") diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java index c77ac501398..5813c3b4a93 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamQuantity.java @@ -22,16 +22,14 @@ package ca.uhn.fhir.jpa.model.entity; import ca.uhn.fhir.interceptor.model.RequestPartitionId; import ca.uhn.fhir.jpa.model.config.PartitionSettings; -import ca.uhn.fhir.jpa.model.util.BigDecimalNumericFieldBridge; import ca.uhn.fhir.model.api.IQueryParameterType; import ca.uhn.fhir.rest.param.QuantityParam; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.annotations.Field; -import org.hibernate.search.annotations.FieldBridge; -import org.hibernate.search.annotations.NumericField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ScaledNumberField; import javax.persistence.Column; import javax.persistence.Embeddable; @@ -65,16 +63,17 @@ public class ResourceIndexedSearchParamQuantity extends BaseResourceIndexedSearc private static final long serialVersionUID = 1L; 
@Column(name = "SP_SYSTEM", nullable = true, length = MAX_LENGTH) - @Field + @FullTextField public String mySystem; + @Column(name = "SP_UNITS", nullable = true, length = MAX_LENGTH) - @Field + @FullTextField public String myUnits; @Column(name = "SP_VALUE", nullable = true) - @Field - @NumericField - @FieldBridge(impl = BigDecimalNumericFieldBridge.class) + + @ScaledNumberField public BigDecimal myValue; + @Id @SequenceGenerator(name = "SEQ_SPIDX_QUANTITY", sequenceName = "SEQ_SPIDX_QUANTITY") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_SPIDX_QUANTITY") diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java index 2f245b6b77a..5cbc85278d2 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamString.java @@ -29,13 +29,10 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.annotations.Analyze; -import org.hibernate.search.annotations.Analyzer; -import org.hibernate.search.annotations.ContainedIn; -import org.hibernate.search.annotations.Field; -import org.hibernate.search.annotations.Fields; -import org.hibernate.search.annotations.Indexed; -import org.hibernate.search.annotations.Store; +import org.hibernate.search.engine.backend.types.Projectable; +import org.hibernate.search.engine.backend.types.Searchable; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; import javax.persistence.Column; import javax.persistence.Embeddable; @@ -72,7 +69,6 @@ import static org.apache.commons.lang3.StringUtils.left; @Index(name = "IDX_SP_STRING_UPDATED", columnList = "SP_UPDATED"), @Index(name = "IDX_SP_STRING_RESID", columnList = "RES_ID") }) -@Indexed() public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam { /* @@ -89,16 +85,13 @@ public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchP @ManyToOne(optional = false) @JoinColumn(name = "RES_ID", referencedColumnName = "RES_ID", insertable = false, updatable = false, foreignKey = @ForeignKey(name = "FK_SPIDXSTR_RESOURCE")) - @ContainedIn private ResourceTable myResourceTable; @Column(name = "SP_VALUE_EXACT", length = MAX_LENGTH, nullable = true) - @Fields({ - @Field(name = "myValueText", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")), - @Field(name = "myValueTextEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")), - @Field(name = "myValueTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")), - @Field(name = "myValueTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer")) - }) +// 
@FullTextField(name = "myValueText", searchable=Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer") +// @FullTextField(name = "myValueTextEdgeNGram", searchable=Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteEdgeAnalyzer") +// @FullTextField(name = "myValueTextNGram", searchable=Searchable.YES, projectable = Projectable.NO, analyzer = "autocompleteNGramAnalyzer") +// @FullTextField(name = "myValueTextPhonetic", searchable=Searchable.YES, projectable = Projectable.NO, analyzer = "autocompletePhoneticAnalyzer") private String myValueExact; @Column(name = "SP_VALUE_NORMALIZED", length = MAX_LENGTH, nullable = true) diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java index 0b22db7bab1..63473f75e2a 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamToken.java @@ -29,7 +29,7 @@ import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; -import org.hibernate.search.annotations.Field; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import javax.persistence.Column; import javax.persistence.Embeddable; @@ -71,12 +71,14 @@ public class ResourceIndexedSearchParamToken extends BaseResourceIndexedSearchPa private static final long serialVersionUID = 1L; - @Field() + @FullTextField @Column(name = "SP_SYSTEM", nullable = true, length = MAX_LENGTH) public String mySystem; - @Field() + + @FullTextField @Column(name = "SP_VALUE", nullable = true, length = MAX_LENGTH) private String myValue; + @SuppressWarnings("unused") @Id @SequenceGenerator(name = "SEQ_SPIDX_TOKEN", sequenceName = "SEQ_SPIDX_TOKEN") diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java index cb6d3eb53bf..db99cecb3f8 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceIndexedSearchParamUri.java @@ -28,7 +28,7 @@ import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; import org.apache.commons.lang3.builder.ToStringBuilder; -import org.hibernate.search.annotations.Field; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import javax.persistence.Column; import javax.persistence.Embeddable; @@ -61,7 +61,7 @@ public class ResourceIndexedSearchParamUri extends BaseResourceIndexedSearchPara private static final long serialVersionUID = 1L; @Column(name = "SP_URI", nullable = true, length = MAX_LENGTH) - @Field() + @FullTextField public String myUri; @Id diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java index b272d83fa98..a722898a109 100644 --- 
a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceLink.java @@ -23,7 +23,7 @@ package ca.uhn.fhir.jpa.model.entity; import org.apache.commons.lang3.Validate; import org.apache.commons.lang3.builder.EqualsBuilder; import org.apache.commons.lang3.builder.HashCodeBuilder; -import org.hibernate.search.annotations.Field; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; import org.hl7.fhir.instance.model.api.IIdType; import javax.persistence.Column; @@ -36,13 +36,11 @@ import javax.persistence.Id; import javax.persistence.Index; import javax.persistence.JoinColumn; import javax.persistence.ManyToOne; -import javax.persistence.OneToMany; import javax.persistence.SequenceGenerator; import javax.persistence.Table; import javax.persistence.Temporal; import javax.persistence.TemporalType; import javax.persistence.Transient; -import java.util.Collection; import java.util.Date; @Entity @@ -72,7 +70,7 @@ public class ResourceLink extends BaseResourceIndex { private Long mySourceResourcePid; @Column(name = "SOURCE_RESOURCE_TYPE", updatable = false, nullable = false, length = ResourceTable.RESTYPE_LEN) - @Field() + @FullTextField private String mySourceResourceType; @ManyToOne(optional = true, fetch = FetchType.LAZY) @@ -80,15 +78,18 @@ public class ResourceLink extends BaseResourceIndex { private ResourceTable myTargetResource; @Column(name = "TARGET_RESOURCE_ID", insertable = true, updatable = true, nullable = true) - @Field() + @FullTextField private Long myTargetResourcePid; + @Column(name = "TARGET_RESOURCE_TYPE", nullable = false, length = ResourceTable.RESTYPE_LEN) - @Field() + @FullTextField private String myTargetResourceType; + @Column(name = "TARGET_RESOURCE_URL", length = 200, nullable = true) - @Field() + @FullTextField private String myTargetResourceUrl; - @Field() + + @FullTextField @Column(name = "SP_UPDATED", nullable = true) // TODO: make this false after HAPI 2.3 @Temporal(TemporalType.TIMESTAMP) private Date myUpdated; diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java index 0db1ab866e0..f0a9c24103d 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/entity/ResourceTable.java @@ -22,20 +22,26 @@ package ca.uhn.fhir.jpa.model.entity; import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource; import ca.uhn.fhir.jpa.model.cross.IResourceLookup; +import ca.uhn.fhir.jpa.model.search.ResourceTableRoutingBinder; import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId; -import ca.uhn.fhir.jpa.model.search.IndexNonDeletedInterceptor; import ca.uhn.fhir.model.primitive.IdDt; import ca.uhn.fhir.rest.api.Constants; import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException; import org.apache.commons.lang3.builder.ToStringBuilder; import org.apache.commons.lang3.builder.ToStringStyle; import org.hibernate.annotations.OptimisticLock; -import org.hibernate.search.annotations.Analyze; -import org.hibernate.search.annotations.Analyzer; -import org.hibernate.search.annotations.Field; -import org.hibernate.search.annotations.Fields; -import org.hibernate.search.annotations.Indexed; -import org.hibernate.search.annotations.Store; +import 
org.hibernate.search.engine.backend.types.Projectable; +import org.hibernate.search.engine.backend.types.Searchable; +import org.hibernate.search.mapper.pojo.automaticindexing.ReindexOnUpdate; +import org.hibernate.search.mapper.pojo.bridge.mapping.annotation.RoutingBinderRef; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.DocumentId; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.FullTextField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath; +import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue; import javax.persistence.*; import java.io.Serializable; @@ -48,7 +54,7 @@ import java.util.stream.Collectors; import static org.apache.commons.lang3.StringUtils.defaultString; -@Indexed(interceptor = IndexNonDeletedInterceptor.class) +@Indexed(routingBinder= @RoutingBinderRef(type = ResourceTableRoutingBinder.class)) @Entity @Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = { @Index(name = "IDX_RES_DATE", columnList = "RES_UPDATED"), @@ -63,15 +69,18 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas /** * Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB + * Note the extra config needed in HS6 for indexing transient props: + * https://docs.jboss.org/hibernate/search/6.0/migration/html_single/#indexed-transient-requires-configuration + * + * Note that we depend on `myVersion` updated for this field to be indexed. 
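+	 * (That dependency is declared below with @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion"))).)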
*/ - @Transient() - @Fields({ - @Field(name = "myContentText", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")), - @Field(name = "myContentTextEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")), - @Field(name = "myContentTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")), - @Field(name = "myContentTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer")) - }) + @Transient + @FullTextField(name = "myContentText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer") + @FullTextField(name = "myContentTextEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer") + @FullTextField(name = "myContentTextNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer") + @FullTextField(name = "myContentTextPhonetic", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompletePhoneticAnalyzer") @OptimisticLock(excluded = true) + @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion"))) private String myContentText; @Column(name = "HASH_SHA256", length = 64, nullable = true) @@ -86,6 +95,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas @SequenceGenerator(name = "SEQ_RESOURCE_ID", sequenceName = "SEQ_RESOURCE_ID") @GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_RESOURCE_ID") @Column(name = "RES_ID") + @GenericField(projectable = Projectable.YES) private Long myId; @Column(name = "SP_INDEX_STATUS", nullable = true) @@ -100,13 +110,12 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * Holds the narrative text only - Used for Fulltext searching but not directly stored in the DB */ @Transient() - @Fields({ - @Field(name = "myNarrativeText", index = org.hibernate.search.annotations.Index.YES, store = Store.YES, analyze = Analyze.YES, analyzer = @Analyzer(definition = "standardAnalyzer")), - @Field(name = "myNarrativeTextEdgeNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteEdgeAnalyzer")), - @Field(name = "myNarrativeTextNGram", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompleteNGramAnalyzer")), - @Field(name = "myNarrativeTextPhonetic", index = org.hibernate.search.annotations.Index.YES, store = Store.NO, analyze = Analyze.YES, analyzer = @Analyzer(definition = "autocompletePhoneticAnalyzer")) - }) + @FullTextField(name = "myNarrativeText", searchable = Searchable.YES, projectable = Projectable.YES, analyzer = "standardAnalyzer") + @FullTextField(name = "myNarrativeTextEdgeNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteEdgeAnalyzer") + @FullTextField(name = "myNarrativeTextNGram", searchable= Searchable.YES, projectable= Projectable.NO, analyzer = "autocompleteNGramAnalyzer") + @FullTextField(name = "myNarrativeTextPhonetic", searchable= Searchable.YES, projectable= 
Projectable.NO, analyzer = "autocompletePhoneticAnalyzer") @OptimisticLock(excluded = true) + @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myVersion"))) private String myNarrativeText; @OneToMany(mappedBy = "myResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) @@ -191,8 +200,9 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * You can test that any changes don't cause extra queries by running * FhirResourceDaoR4QueryCountTest */ - @Field + @FullTextField @Transient + @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "myResourceLinks"))) private String myResourceLinksField; @OneToMany(mappedBy = "myTargetResource", cascade = {}, fetch = FetchType.LAZY, orphanRemoval = false) @@ -200,7 +210,7 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas private Collection myResourceLinksAsTarget; @Column(name = "RES_TYPE", length = RESTYPE_LEN, nullable = false) - @Field + @FullTextField @OptimisticLock(excluded = true) private String myResourceType; @@ -531,14 +541,19 @@ public class ResourceTable extends BaseHasResource implements Serializable, IBas * and was not re-saved in the database */ public void setUnchangedInCurrentOperation(boolean theUnchangedInCurrentOperation) { + myUnchangedInCurrentOperation = theUnchangedInCurrentOperation; } - public void setContentTextParsedIntoWords(String theContentText) { + public void setContentText(String theContentText) { myContentText = theContentText; } - public void setNarrativeTextParsedIntoWords(String theNarrativeText) { + public String getContentText() { + return myContentText; + } + + public void setNarrativeText(String theNarrativeText) { myNarrativeText = theNarrativeText; } diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/IndexNonDeletedInterceptor.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/IndexNonDeletedInterceptor.java deleted file mode 100644 index 6c317cf27f5..00000000000 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/IndexNonDeletedInterceptor.java +++ /dev/null @@ -1,66 +0,0 @@ -package ca.uhn.fhir.jpa.model.search; - -/* - * #%L - * HAPI FHIR Model - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import ca.uhn.fhir.jpa.model.entity.ResourceTable; -import org.hibernate.search.indexes.interceptor.EntityIndexingInterceptor; -import org.hibernate.search.indexes.interceptor.IndexingOverride; - -/** - * Note that this is a Hibernate Search interceptor, not a HAPI FHIR interceptor. - * It's used in {@link ResourceTable}. There is no reason for this to be used - * in any user code. 
- * - * Only store non-deleted resources - */ -public class IndexNonDeletedInterceptor implements EntityIndexingInterceptor { - - @Override - public IndexingOverride onAdd(ResourceTable entity) { - if (entity.getDeleted() == null) { - if (entity.getIndexStatus() != null) { - return IndexingOverride.APPLY_DEFAULT; - } - } - return IndexingOverride.SKIP; - } - - @Override - public IndexingOverride onUpdate(ResourceTable entity) { - if (entity.getIndexStatus() == null) { - return IndexingOverride.SKIP; - } - if (entity.getDeleted() == null) { - return IndexingOverride.UPDATE; - } - return IndexingOverride.REMOVE; - } - - @Override - public IndexingOverride onDelete(ResourceTable entity) { - return IndexingOverride.APPLY_DEFAULT; - } - - @Override - public IndexingOverride onCollectionUpdate(ResourceTable entity) { - return IndexingOverride.APPLY_DEFAULT; - } -} diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java new file mode 100644 index 00000000000..a13f6e07416 --- /dev/null +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/search/ResourceTableRoutingBinder.java @@ -0,0 +1,53 @@ +package ca.uhn.fhir.jpa.model.search; + +/*- + * #%L + * HAPI FHIR Model + * %% + * Copyright (C) 2014 - 2020 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * #L% + */ + +import ca.uhn.fhir.jpa.model.entity.ResourceTable; +import org.hibernate.search.mapper.pojo.bridge.RoutingBridge; +import org.hibernate.search.mapper.pojo.bridge.binding.RoutingBindingContext; +import org.hibernate.search.mapper.pojo.bridge.mapping.programmatic.RoutingBinder; +import org.hibernate.search.mapper.pojo.bridge.runtime.RoutingBridgeRouteContext; +import org.hibernate.search.mapper.pojo.route.DocumentRoutes; + +public class ResourceTableRoutingBinder implements RoutingBinder { + @Override + public void bind(RoutingBindingContext theRoutingBindingContext) { + theRoutingBindingContext.dependencies().use("myDeleted").use("myIndexStatus"); + theRoutingBindingContext.bridge(ResourceTable.class, new ResourceTableBridge()); + } + + private static class ResourceTableBridge implements RoutingBridge { + + @Override + public void route(DocumentRoutes theDocumentRoutes, Object theO, ResourceTable theResourceTable, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + if (theResourceTable.getDeleted() == null && theResourceTable.getIndexStatus() != null ) { + theDocumentRoutes.addRoute(); + } else { + theDocumentRoutes.notIndexed(); + } + } + + @Override + public void previousRoutes(DocumentRoutes theDocumentRoutes, Object theO, ResourceTable theResourceTable, RoutingBridgeRouteContext theRoutingBridgeRouteContext) { + theDocumentRoutes.addRoute(); + } + } +} diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/BigDecimalNumericFieldBridge.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/BigDecimalNumericFieldBridge.java deleted file mode 100644 index 91beb4df7af..00000000000 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/BigDecimalNumericFieldBridge.java +++ /dev/null @@ -1,62 +0,0 @@ -package ca.uhn.fhir.jpa.model.util; - -/* - * #%L - * HAPI FHIR Model - * %% - * Copyright (C) 2014 - 2021 Smile CDR, Inc. - * %% - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * #L% - */ - -import org.apache.lucene.document.Document; -import org.apache.lucene.index.IndexableField; -import org.hibernate.search.bridge.LuceneOptions; -import org.hibernate.search.bridge.TwoWayFieldBridge; - -import java.math.BigDecimal; - -public class BigDecimalNumericFieldBridge implements TwoWayFieldBridge { - @Override - public void set(String name, Object value, Document document, LuceneOptions luceneOptions) { - if (value == null) { - if (luceneOptions.indexNullAs() != null) { - luceneOptions.addFieldToDocument(name, luceneOptions.indexNullAs(), document); - } - } else { - BigDecimal bdValue = (BigDecimal)value; - applyToLuceneOptions(luceneOptions, name, bdValue.doubleValue(), document); - } - } - - @Override - public final String objectToString(final Object object) { - return object == null ? 
null : object.toString(); - } - - @Override - public Object get(final String name, final Document document) { - final IndexableField field = document.getField(name); - if (field != null) { - Double doubleVal = (Double)field.numericValue(); - return new BigDecimal(doubleVal); - } else { - return null; - } - } - - protected void applyToLuceneOptions(LuceneOptions luceneOptions, String name, Number value, Document document) { - luceneOptions.addNumericFieldToDocument(name, value, document); - } -} diff --git a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java index 32c1887f008..b24a677c985 100644 --- a/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java +++ b/hapi-fhir-jpaserver-model/src/main/java/ca/uhn/fhir/jpa/model/util/JpaConstants.java @@ -101,10 +101,6 @@ public class JpaConstants { */ // NB don't delete this, it's used in Smile as well, even though hapi-fhir-server uses the version from Constants.java public static final String OPERATION_VALIDATE = Constants.EXTOP_VALIDATE; - /** - * Operation name for the $suggest-keywords operation - */ - public static final String OPERATION_SUGGEST_KEYWORDS = "$suggest-keywords"; /** * Operation name for the $everything operation */ diff --git a/hapi-fhir-jpaserver-searchparam/pom.xml b/hapi-fhir-jpaserver-searchparam/pom.xml index f2e41eba2e3..647164ed6ec 100755 --- a/hapi-fhir-jpaserver-searchparam/pom.xml +++ b/hapi-fhir-jpaserver-searchparam/pom.xml @@ -109,8 +109,8 @@ spring-context - org.hibernate - hibernate-search-engine + org.hibernate.search + hibernate-search-mapper-orm org.jscience diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java index 365020108ed..cd0e7ef1a60 100644 --- a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/BaseSearchParamExtractor.java @@ -48,7 +48,7 @@ import com.google.common.collect.Sets; import org.apache.commons.lang3.ObjectUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.Validate; -import org.hibernate.search.spatial.impl.Point; +import org.hibernate.search.engine.spatial.GeoPoint; import org.hl7.fhir.exceptions.FHIRException; import org.hl7.fhir.instance.model.api.IBase; import org.hl7.fhir.instance.model.api.IBaseEnumeration; @@ -77,6 +77,7 @@ import java.util.TreeSet; import java.util.regex.Pattern; import java.util.stream.Collectors; +import static ca.uhn.fhir.jpa.searchparam.extractor.GeopointNormalizer.normalizeLongitude; import static org.apache.commons.lang3.StringUtils.isBlank; import static org.apache.commons.lang3.StringUtils.isNotBlank; import static org.apache.commons.lang3.StringUtils.trim; @@ -775,13 +776,14 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor } // We only accept coordinates when both are present if (latitude != null && longitude != null) { - double normalizedLatitude = Point.normalizeLatitude(latitude.doubleValue()); - double normalizedLongitude = Point.normalizeLongitude(longitude.doubleValue()); + double normalizedLatitude = GeopointNormalizer.normalizeLatitude(latitude.doubleValue()); + double normalizedLongitude = 
GeopointNormalizer.normalizeLongitude(longitude.doubleValue()); ResourceIndexedSearchParamCoords nextEntity = new ResourceIndexedSearchParamCoords(myPartitionSettings, theResourceType, theSearchParam.getName(), normalizedLatitude, normalizedLongitude); theParams.add(nextEntity); } } + private void addString_HumanName(String theResourceType, Set theParams, RuntimeSearchParam theSearchParam, IBase theValue) { List families = extractValuesAsStrings(myHumanNameFamilyValueChild, theValue); for (String next : families) { diff --git a/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java new file mode 100644 index 00000000000..aeaedc1e4e3 --- /dev/null +++ b/hapi-fhir-jpaserver-searchparam/src/main/java/ca/uhn/fhir/jpa/searchparam/extractor/GeopointNormalizer.java @@ -0,0 +1,82 @@ +package ca.uhn.fhir.jpa.searchparam.extractor; + +/*- + * #%L + * HAPI FHIR Search Parameters + * %% + * Copyright (C) 2014 - 2020 University Health Network + * %% + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * #L% + */ + +/** + * This class fully and unabashedly stolen from Hibernate search 5.11.4 FINAL's implementation as it was stripped in HS6 + */ +public class GeopointNormalizer { + + static int WHOLE_CIRCLE_DEGREE_RANGE = 360; + static int LONGITUDE_DEGREE_RANGE = WHOLE_CIRCLE_DEGREE_RANGE; + static int LATITUDE_DEGREE_RANGE = WHOLE_CIRCLE_DEGREE_RANGE / 2; + static int LATITUDE_DEGREE_MIN = -LATITUDE_DEGREE_RANGE / 2; + static int LATITUDE_DEGREE_MAX = LATITUDE_DEGREE_RANGE / 2; + + public static double normalizeLongitude(double longitude) { + if ( longitude == ( -LONGITUDE_DEGREE_RANGE / 2 ) ) { + return LONGITUDE_DEGREE_RANGE / 2 ; + } + else { + return normalizeLongitudeInclusive( longitude ); + } + } + + public static double normalizeLongitudeInclusive(double longitude) { + if ( (longitude < -( LONGITUDE_DEGREE_RANGE / 2 ) ) || (longitude > ( LONGITUDE_DEGREE_RANGE / 2 ) ) ) { + double _longitude; + // shift 180 and normalize full circle turn + _longitude = ( ( longitude + ( LONGITUDE_DEGREE_RANGE / 2 ) ) % WHOLE_CIRCLE_DEGREE_RANGE ); + // as Java % is not a math modulus we may have negative numbers so the unshift is sign dependant + if ( _longitude < 0 ) { + _longitude = _longitude + ( LONGITUDE_DEGREE_RANGE / 2 ); + } + else { + _longitude = _longitude - ( LONGITUDE_DEGREE_RANGE / 2 ); + } + return _longitude; + } + else { + return longitude; + } + } + + /** + * @param latitude in degrees + * @return latitude normalized in [-90;+90] + */ + public static double normalizeLatitude(double latitude) { + if ( latitude > LATITUDE_DEGREE_MAX || latitude < LATITUDE_DEGREE_MIN ) { + // shift 90, normalize full circle turn and 'symmetry' on the lat axis with abs + double _latitude = Math.abs( ( latitude + ( LATITUDE_DEGREE_RANGE / 2 ) ) % ( WHOLE_CIRCLE_DEGREE_RANGE ) ); + // Push 2nd and 3rd quadran in 1st and 4th by 'symmetry' + if ( _latitude 
> LATITUDE_DEGREE_RANGE ) { + _latitude = WHOLE_CIRCLE_DEGREE_RANGE - _latitude; + } + // unshift + _latitude = _latitude - ( LATITUDE_DEGREE_RANGE / 2 ); + return _latitude; + } + else { + return latitude; + } + } +} diff --git a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java index 1e3dbd7a598..a6d1c6c5cc3 100644 --- a/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java +++ b/hapi-fhir-jpaserver-test-utilities/src/main/java/ca/uhn/fhir/jpa/config/TestJpaR4Config.java @@ -24,6 +24,7 @@ import ca.uhn.fhir.context.FhirContext; import ca.uhn.fhir.context.support.DefaultProfileValidationSupport; import ca.uhn.fhir.jpa.binstore.IBinaryStorageSvc; import ca.uhn.fhir.jpa.binstore.MemoryBinaryStorageSvcImpl; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CircularQueueCaptureQueriesListener; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.rest.server.interceptor.RequestValidatingInterceptor; @@ -33,6 +34,10 @@ import net.ttddyy.dsproxy.listener.logging.SLF4JLogLevel; import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.hibernate.dialect.H2Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; @@ -103,10 +108,12 @@ public class TestJpaR4Config extends BaseJavaConfigR4 { extraProperties.put("hibernate.show_sql", "false"); extraProperties.put("hibernate.hbm2ddl.auto", "update"); extraProperties.put("hibernate.dialect", H2Dialect.class.getName()); - extraProperties.put("hibernate.search.model_mapping", ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "local-heap"); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); - extraProperties.put("hibernate.search.autoregister_listeners", "true"); + + extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-heap"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); + extraProperties.put(HibernateOrmMapperSettings.ENABLED, "true"); return extraProperties; } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java index ffad3ee79cd..365957556f4 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu2Config.java @@ -3,7 +3,7 @@ package ca.uhn.fhirtest.config; import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu2; import ca.uhn.fhir.jpa.model.entity.ModelConfig; -import 
ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect; import ca.uhn.fhir.jpa.validation.ValidationSettings; @@ -15,6 +15,10 @@ import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.lang3.time.DateUtils; import org.hibernate.dialect.PostgreSQL94Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hl7.fhir.dstu2.model.Subscription; import org.hl7.fhir.r5.utils.IResourceValidator; import org.springframework.beans.factory.annotation.Value; @@ -150,10 +154,13 @@ public class TestDstu2Config extends BaseJavaConfigDstu2 { extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_structured_entries", "false"); extraProperties.put("hibernate.cache.use_minimal_puts", "false"); - extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "filesystem"); - extraProperties.put("hibernate.search.default.indexBase", myFhirLuceneLocation); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); + + extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); + return extraProperties; } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java index b1a8a962e5a..b9d81163c10 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestDstu3Config.java @@ -4,7 +4,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.config.BaseJavaConfigDstu3; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect; import ca.uhn.fhir.jpa.validation.ValidationSettings; @@ -15,6 +15,10 @@ import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.lang3.time.DateUtils; import org.hibernate.dialect.PostgreSQL94Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import 
org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hl7.fhir.dstu2.model.Subscription; import org.hl7.fhir.r5.utils.IResourceValidator; import org.springframework.beans.factory.annotation.Autowire; @@ -158,10 +162,13 @@ public class TestDstu3Config extends BaseJavaConfigDstu3 { extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_structured_entries", "false"); extraProperties.put("hibernate.cache.use_minimal_puts", "false"); - extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "filesystem"); - extraProperties.put("hibernate.search.default.indexBase", myFhirLuceneLocation); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); + + extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); + return extraProperties; } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java index c748dd9dc5f..dc20bd902c7 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR4Config.java @@ -4,7 +4,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.config.BaseJavaConfigR4; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect; import ca.uhn.fhir.jpa.validation.ValidationSettings; @@ -16,6 +16,10 @@ import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.lang3.time.DateUtils; import org.hibernate.dialect.PostgreSQL94Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hl7.fhir.dstu2.model.Subscription; import org.hl7.fhir.r5.utils.IResourceValidator; import org.springframework.beans.factory.annotation.Autowire; @@ -151,10 +155,12 @@ public class TestR4Config extends BaseJavaConfigR4 { extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_structured_entries", "false"); extraProperties.put("hibernate.cache.use_minimal_puts", "false"); - extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "filesystem"); - extraProperties.put("hibernate.search.default.indexBase", 
myFhirLuceneLocation); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); + + extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); return extraProperties; } diff --git a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java index d8e7610da59..e5f3a5b2aee 100644 --- a/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java +++ b/hapi-fhir-jpaserver-uhnfhirtest/src/main/java/ca/uhn/fhirtest/config/TestR5Config.java @@ -4,7 +4,7 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig; import ca.uhn.fhir.jpa.config.BaseJavaConfigR5; import ca.uhn.fhir.jpa.model.entity.ModelConfig; import ca.uhn.fhir.jpa.search.DatabaseBackedPagingProvider; -import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory; +import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer; import ca.uhn.fhir.jpa.util.CurrentThreadCaptureQueriesListener; import ca.uhn.fhir.jpa.util.DerbyTenSevenHapiFhirDialect; import ca.uhn.fhir.jpa.validation.ValidationSettings; @@ -15,6 +15,10 @@ import net.ttddyy.dsproxy.support.ProxyDataSourceBuilder; import org.apache.commons.dbcp2.BasicDataSource; import org.apache.commons.lang3.time.DateUtils; import org.hibernate.dialect.PostgreSQL94Dialect; +import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings; +import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings; +import org.hibernate.search.engine.cfg.BackendSettings; +import org.hibernate.search.mapper.orm.cfg.HibernateOrmMapperSettings; import org.hl7.fhir.dstu2.model.Subscription; import org.hl7.fhir.r5.utils.IResourceValidator; import org.springframework.beans.factory.annotation.Autowire; @@ -151,10 +155,13 @@ public class TestR5Config extends BaseJavaConfigR5 { extraProperties.put("hibernate.cache.use_second_level_cache", "false"); extraProperties.put("hibernate.cache.use_structured_entries", "false"); extraProperties.put("hibernate.cache.use_minimal_puts", "false"); - extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName()); - extraProperties.put("hibernate.search.default.directory_provider", "filesystem"); - extraProperties.put("hibernate.search.default.indexBase", myFhirLuceneLocation); - extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT"); + + extraProperties.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene"); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName()); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem"); + extraProperties.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), myFhirLuceneLocation); + extraProperties.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT"); + return extraProperties; } diff --git a/hapi-fhir-oauth2/pom.xml b/hapi-fhir-oauth2/pom.xml index 7ba35d308e9..128e87b5a6c 
100644 --- a/hapi-fhir-oauth2/pom.xml +++ b/hapi-fhir-oauth2/pom.xml @@ -107,7 +107,7 @@ org.hamcrest - java-hamcrest + hamcrest ${hamcrest_version} test diff --git a/pom.xml b/pom.xml index bfede46a1dc..6af0ba64420 100644 --- a/pom.xml +++ b/pom.xml @@ -746,11 +746,11 @@ 3.0.2 5.7.0 6.5.4 - 5.4.26.Final + 6.0.0.Final - 5.11.5.Final - 5.5.5 + 8.7.0 + 2.2 6.1.5.Final 4.4.13 4.5.13 @@ -765,9 +765,10 @@ 9.8.0-15 1.2_5 1.7.30 + 2.11.1 5.3.2 - 2.2.0.RELEASE + 2.4.2 4.2.3.RELEASE 2.4.1 1.2.2.RELEASE @@ -1169,12 +1170,12 @@ org.apache.lucene - lucene-highlighter + lucene-analyzers-phonetic ${lucene_version} org.apache.lucene - lucene-analyzers-phonetic + lucene-backward-codecs ${lucene_version} @@ -1332,6 +1333,12 @@ ucum ${ucum_version} + + org.rauschig + jarchivelib + 1.0.0 + test + org.fusesource.jansi jansi @@ -1387,16 +1394,10 @@ jscience 4.3.1 - - - org.hamcrest - java-hamcrest - 2.0.0.0 - org.hamcrest hamcrest - 2.2 + ${hamcrest_version} org.hibernate @@ -1424,18 +1425,34 @@ ${hibernate_validator_version} - org.hibernate - hibernate-search-orm + org.apache.logging.log4j + log4j-to-slf4j + ${log4j_to_slf4j_version} + + + org.hibernate.search + hibernate-search-mapper-orm + ${hibernate_search_version} + + + org.apache.logging.log4j + log4j-api + + + + + org.elasticsearch.client + elasticsearch-rest-high-level-client + 7.10.0 + + + org.hibernate.search + hibernate-search-backend-elasticsearch ${hibernate_search_version} - org.hibernate - hibernate-search-elasticsearch - ${hibernate_search_version} - - - org.hibernate - hibernate-search-engine + org.hibernate.search + hibernate-search-backend-lucene ${hibernate_search_version} @@ -1512,6 +1529,11 @@ jcl-over-slf4j ${slf4j_version} + + org.slf4j + log4j-over-slf4j + ${slf4j_version} + org.springframework spring-beans @@ -1664,9 +1686,22 @@ 2.4.0 - pl.allegro.tech - embedded-elasticsearch - 2.10.0 + org.testcontainers + testcontainers + 1.15.1 + test + + + org.testcontainers + elasticsearch + 1.15.1 + test + + + org.testcontainers + junit-jupiter + 1.15.1 + test xpp3 @@ -2530,7 +2565,6 @@ hapi-fhir-validation-resources-r4 hapi-fhir-structures-r5 hapi-fhir-validation-resources-r5 - hapi-fhir-elasticsearch-6 hapi-fhir-jpaserver-api hapi-fhir-jpaserver-model hapi-fhir-jpaserver-searchparam @@ -2539,8 +2573,8 @@ hapi-fhir-jaxrsserver-example hapi-fhir-jpaserver-batch hapi-fhir-jpaserver-base - hapi-fhir-jpaserver-mdm hapi-fhir-jpaserver-migrate + hapi-fhir-jpaserver-mdm restful-server-example hapi-fhir-testpage-overlay hapi-fhir-jpaserver-uhnfhirtest
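As a reference for downstream configurations that previously set the removed hibernate.search.* string properties, the Test*Config changes above all switch to the typed Hibernate Search 6 keys. A minimal sketch of that mapping, assuming the same Properties-based bootstrap those classes use (the helper class and method names here are illustrative only, not part of this change set):

import java.util.Properties;

import org.hibernate.search.backend.lucene.cfg.LuceneBackendSettings;
import org.hibernate.search.backend.lucene.cfg.LuceneIndexSettings;
import org.hibernate.search.engine.cfg.BackendSettings;

import ca.uhn.fhir.jpa.search.HapiLuceneAnalysisConfigurer;

public class LuceneHibernateSearchProperties {

	/**
	 * Hibernate Search 6 equivalents of the old hibernate.search.default.directory_provider,
	 * hibernate.search.default.indexBase, hibernate.search.model_mapping and
	 * hibernate.search.lucene_version properties removed in this change.
	 */
	public static Properties luceneProperties(String theIndexDirectory) {
		Properties props = new Properties();
		// Select the Lucene backend
		props.put(BackendSettings.backendKey(BackendSettings.TYPE), "lucene");
		// Analyzer definitions are now registered through an analysis configurer instead of model_mapping
		props.put(BackendSettings.backendKey(LuceneBackendSettings.ANALYSIS_CONFIGURER), HapiLuceneAnalysisConfigurer.class.getName());
		// directory_provider=filesystem + indexBase become DIRECTORY_TYPE + DIRECTORY_ROOT
		props.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_TYPE), "local-filesystem");
		props.put(BackendSettings.backendKey(LuceneIndexSettings.DIRECTORY_ROOT), theIndexDirectory);
		props.put(BackendSettings.backendKey(LuceneBackendSettings.LUCENE_VERSION), "LUCENE_CURRENT");
		return props;
	}
}

In-memory test configurations (as in TestJpaR4Config) use DIRECTORY_TYPE "local-heap" instead of "local-filesystem" and omit DIRECTORY_ROOT.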