Additional tests and cleanup.
parent d03ce6533e
commit 4a89860f9b
@@ -20,7 +20,6 @@ package ca.uhn.fhir.cli;
* #L%
*/

import ca.uhn.fhir.jpa.demo.*;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.jpa.demo.ContextHolder;
import ca.uhn.fhir.jpa.demo.FhirServerConfig;
@@ -48,10 +47,8 @@ public class RunServerCommand extends BaseCommand {
private static final String OPTION_LOWMEM = "lowmem";
private static final String OPTION_ALLOW_EXTERNAL_REFS = "allow-external-refs";
private static final String OPTION_REUSE_SEARCH_RESULTS_MILLIS = "reuse-search-results-milliseconds";
private static final String OPTION_EXTERNAL_ELASTICSEARCH = "external-elasticsearch";
private static final int DEFAULT_PORT = 8080;
private static final String OPTION_P = "p";
private static final String OPTION_POSTGRES = "postgresql";

// TODO: Don't use qualified names for loggers in HAPI CLI.
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(RunServerCommand.class);
@@ -73,8 +70,6 @@ public class RunServerCommand extends BaseCommand {
options.addOption(null, OPTION_LOWMEM, false, "If this flag is set, the server will operate in low memory mode (some features disabled)");
options.addOption(null, OPTION_ALLOW_EXTERNAL_REFS, false, "If this flag is set, the server will allow resources to be persisted containing external resource references");
options.addOption(null, OPTION_DISABLE_REFERENTIAL_INTEGRITY, false, "If this flag is set, the server will not enforce referential integrity");
options.addOption(null, OPTION_EXTERNAL_ELASTICSEARCH, false, "If this flag is set, the server will attempt to use a local elasticsearch server listening on port 9301");
options.addOption(null, OPTION_POSTGRES, false, "If this flag is set, the server will attempt to use a local postgresql DB instance listening on port 5432");

addOptionalOption(options, "u", "url", "Url", "If this option is set, specifies the JDBC URL to use for the database connection");
addOptionalOption(options, "d", "default-size", "PageSize", "If this option is set, specifies the default page size for number of query results");
@@ -115,18 +110,8 @@ public class RunServerCommand extends BaseCommand {
ContextHolder.setDisableReferentialIntegrity(true);
}

if (theCommandLine.hasOption(OPTION_EXTERNAL_ELASTICSEARCH)) {
ourLog.info("Server is configured to use external elasticsearch");
ContextHolder.setExternalElasticsearch(true);
}

ContextHolder.setDatabaseUrl(theCommandLine.getOptionValue("u"));

if (theCommandLine.hasOption(OPTION_POSTGRES)) {
ourLog.info("Server is configured to use PostgreSQL database");
ContextHolder.setPostgreSql(true);
}

String defaultPageSize = theCommandLine.getOptionValue("d");
String maxPageSize = theCommandLine.getOptionValue("m");
if (defaultPageSize != null) {

@@ -46,7 +46,6 @@ import java.util.concurrent.TimeUnit;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@SuppressWarnings("Duplicates")
// TODO: Merge this with new CommonPostgreSQLConfig or find way to avoid conflicts with it.
@Configuration
public class CommonConfig {

@@ -76,42 +75,6 @@ public class CommonConfig {
*/
@Bean(destroyMethod = "close")
public DataSource dataSource() {
if (ContextHolder.isPostGreSql()) {
return getPostgreSqlDataSource();
} else {
return getH2DataSource();
}
}

/**
* The following method creates a PostgreSQL database connection. The 'url' property value of "jdbc:postgresql://localhost:5432/hapi" indicates that the server should save resources in a
* PostgreSQL database named "hapi".
*
* A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
*/
private DataSource getPostgreSqlDataSource() {
String dbUrl = "jdbc:postgresql://localhost:5432/hapi";
String dbUsername = "hapi";
String dbPassword = "HapiFHIR";
if (isNotBlank(ContextHolder.getDatabaseUrl())) {
dbUrl = ContextHolder.getDatabaseUrl();
}

BasicDataSource retVal = new BasicDataSource();
retVal.setDriverClassName("org.postgresql.Driver");
retVal.setUrl(dbUrl);
retVal.setUsername(dbUsername);
retVal.setPassword(dbPassword);
return retVal;
}

/**
* The following method creates an H2 database connection. The 'url' property value of "jdbc:h2:file:./target/jpaserver_h2_files" indicates that the server should save resources in a
* directory called "jpaserver_h2_files".
*
* A URL to a remote database could also be placed here, along with login credentials and other properties supported by BasicDataSource.
*/
private DataSource getH2DataSource() {
String url = "jdbc:h2:file:./target/jpaserver_h2_files";
if (isNotBlank(ContextHolder.getDatabaseUrl())) {
url = ContextHolder.getDatabaseUrl();
@@ -127,14 +90,6 @@ public class CommonConfig {

@Bean
public Properties jpaProperties() {
if (ContextHolder.isPostGreSql()) {
return getPostGreSqlJpaProperties();
} else {
return getH2JpaProperties();
}
}

private Properties getH2JpaProperties() {
Properties extraProperties = new Properties();
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
extraProperties.put("hibernate.format_sql", "true");
@@ -158,44 +113,12 @@ public class CommonConfig {
return configureElasticearch(extraProperties);
}

private Properties getPostGreSqlJpaProperties() {

Properties extraProperties = new Properties();
extraProperties.put("hibernate.dialect", org.hibernate.dialect.PostgreSQL94Dialect.class.getName());
extraProperties.put("hibernate.format_sql", "false");
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.jdbc.batch_size", "20");
extraProperties.put("hibernate.cache.use_query_cache", "false");
extraProperties.put("hibernate.cache.use_second_level_cache", "false");
extraProperties.put("hibernate.cache.use_structured_entries", "false");
extraProperties.put("hibernate.cache.use_minimal_puts", "false");
extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "local-heap");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");
extraProperties.put("hibernate.search.default.worker.execution", "sync");

if (System.getProperty("lowmem") != null) {
extraProperties.put("hibernate.search.autoregister_listeners", "false");
}

return configureElasticearch(extraProperties);
}

private Properties configureElasticearch(Properties theExtraProperties) {

String elasticsearchHost = "localhost";
String elasticsearchUserId = "";
String elasticsearchPassword = "";
Integer elasticsearchPort;

if(ContextHolder.isExternalElasticsearch()) {
elasticsearchUserId = "elastic";
elasticsearchPassword = "changeme";
elasticsearchPort = 9301;
} else {
elasticsearchPort = embeddedElasticSearch().getHttpPort();
}
int elasticsearchPort = embeddedElasticSearch().getHttpPort();

new ElasticsearchHibernatePropertiesBuilder()
.setDebugRefreshAfterWrite(true)
@@ -212,29 +135,11 @@ public class CommonConfig {

}

@Bean()
public ElasticsearchSvcImpl myElasticsearchSvc() throws IOException {
String elasticsearchHost = "localhost";
String elasticsearchUserId = "";
String elasticsearchPassword = "";
Integer elasticsearchPort;

if(ContextHolder.isExternalElasticsearch()) {
elasticsearchUserId = "elastic";
elasticsearchPassword = "changeme";
elasticsearchPort = 9301;
} else {
elasticsearchPort = embeddedElasticSearch().getHttpPort();
}
return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
}

@Bean
public EmbeddedElastic embeddedElasticSearch() {
String ELASTIC_VERSION = "6.5.4";

EmbeddedElastic embeddedElastic = null;
if(!ContextHolder.isExternalElasticsearch()) {
try {
embeddedElastic = EmbeddedElastic.builder()
.withElasticVersion(ELASTIC_VERSION)
@@ -247,7 +152,6 @@ public class CommonConfig {
} catch (IOException | InterruptedException e) {
throw new ConfigurationException(e);
}
}

return embeddedElastic;
}

@@ -33,8 +33,6 @@ public class ContextHolder {
private static String ourPath;
private static Long ourReuseSearchResultsMillis;
private static String ourDatabaseUrl;
private static boolean myExternalElasticsearch = false;
private static boolean myPostGreSql = false;
private static Integer myDefaultPageSize = 10;
private static Integer myMaxPageSize = 50;

@@ -105,22 +103,6 @@ public class ContextHolder {
ourDatabaseUrl = theDatabaseUrl;
}

public static void setExternalElasticsearch(Boolean theExternalElasticsearch) {
myExternalElasticsearch = theExternalElasticsearch;
}

public static Boolean isExternalElasticsearch() {
return myExternalElasticsearch;
}

public static void setPostgreSql(boolean thePostGreSql) {
myPostGreSql = thePostGreSql;
}

public static boolean isPostGreSql() {
return myPostGreSql;
}

public static void setDefaultPageSize(Integer theDefaultPageSize) {
myDefaultPageSize = theDefaultPageSize;
}

@@ -122,7 +122,6 @@ public class JpaServerDemo extends RestfulServer {
myAppCtx.getBean(DaoConfig.class));
confProvider.setImplementationDescription("Example Server");
setServerConformanceProvider(confProvider);

} else if (fhirVersion == FhirVersionEnum.DSTU3) {
IFhirSystemDao<org.hl7.fhir.dstu3.model.Bundle, org.hl7.fhir.dstu3.model.Meta> systemDao = myAppCtx
.getBean("mySystemDaoDstu3", IFhirSystemDao.class);
@@ -161,7 +160,6 @@ public class JpaServerDemo extends RestfulServer {
/*
* This is a simple paging strategy that keeps the last 10 searches in memory
*/
// TODO: Make this configurable via the ContextHolder
setPagingProvider(new FifoMemoryPagingProvider(10).setDefaultPageSize(ContextHolder.getDefaultPageSize()).setMaximumPageSize(ContextHolder.getMaxPageSize()));

// Register a CORS filter

@@ -618,11 +618,6 @@
<version>1.0-SNAPSHOT</version>
<classifier>shaded6</classifier>
</dependency>
<!-- <dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>5.6.16</version>
</dependency> -->

</dependencies>

@@ -270,7 +270,7 @@ public abstract class BaseConfig {

public static void configureEntityManagerFactory(LocalContainerEntityManagerFactoryBean theFactory, FhirContext theCtx) {
theFactory.setJpaDialect(hibernateJpaDialect(theCtx.getLocalizer()));
// TODO: Looking at moving the lastn entities into jpa.model.entity package.
// TODO: Looking at moving the lastn entities into jpa.model.entity package. Note that moving the lastn entities may require re-building elasticsearch indexes.
theFactory.setPackagesToScan("ca.uhn.fhir.jpa.model.entity", "ca.uhn.fhir.jpa.entity", "ca.uhn.fhir.jpa.dao.lastn.entity");
theFactory.setPersistenceProvider(new HibernatePersistenceProvider());
}

@@ -0,0 +1,45 @@
package ca.uhn.fhir.jpa.dao;

/*
* #%L
* HAPI FHIR JPA Server
* %%
* Copyright (C) 2014 - 2020 University Health Network
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/

import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
import ca.uhn.fhir.jpa.search.lastn.IndexConstants;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.*;
import ca.uhn.fhir.rest.api.server.*;
import org.hl7.fhir.instance.model.api.*;

public abstract class BaseHapiFhirResourceDaoObservation<T extends IBaseResource> extends BaseHapiFhirResourceDao<T> implements IFhirResourceDaoObservation<T> {

protected void updateSearchParamsForLastn(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails) {
if (!isPagingProviderDatabaseBacked(theRequestDetails)) {
theSearchParameterMap.setLoadSynchronous(true);
}

theSearchParameterMap.setLastN(true);
if (theSearchParameterMap.getSort() == null) {
SortSpec effectiveDtm = new SortSpec("date").setOrder(SortOrderEnum.DESC);
SortSpec observationCode = new SortSpec(IndexConstants.CODE_SEARCH_PARAM).setOrder(SortOrderEnum.ASC).setChain(effectiveDtm);
theSearchParameterMap.setSort(new SortSpec(IndexConstants.SUBJECT_SEARCH_PARAM).setChain(observationCode));
}
}

}

@@ -348,7 +348,17 @@ public class SearchBuilder implements ISearchBuilder {
}

if (myParams.isLastN()) {
lastnPids = myIElasticsearchSvc.executeLastN(myParams, theRequest, myIdHelperService);
Integer myMaxObservationsPerCode = null;
String[] maxCountParams = theRequest.getParameters().get("max");
if (maxCountParams != null && maxCountParams.length > 0) {
myMaxObservationsPerCode = Integer.valueOf(maxCountParams[0]);
} else {
throw new InvalidRequestException("Max parameter is required for $lastn operation");
}
List<String> lastnResourceIds = myIElasticsearchSvc.executeLastN(myParams, myMaxObservationsPerCode);
for (String lastnResourceId : lastnResourceIds) {
lastnPids.add(myIdHelperService.resolveResourcePersistentIds(myResourceName, lastnResourceId));
}
}
}

@@ -70,6 +70,7 @@ public class ObservationLastNIndexPersistDstu3Svc {
codeableConceptField.addCoding(new ObservationIndexedCodeCodingEntity(codeCoding.getSystem(), codeCoding.getCode(), codeCoding.getDisplay(), observationCodeNormalizedId));
}
myObservationIndexedCodeableConceptSearchParamDao.save(codeableConceptField);
codeableConceptField = myObservationIndexedCodeableConceptSearchParamDao.findByCodeableConceptId(observationCodeNormalizedId);

indexedObservation.setObservationCode(codeableConceptField);
indexedObservation.setCodeNormalizedId(observationCodeNormalizedId);

@@ -70,6 +70,7 @@ public class ObservationLastNIndexPersistR5Svc {
codeableConceptField.addCoding(new ObservationIndexedCodeCodingEntity(codeCoding.getSystem(), codeCoding.getCode(), codeCoding.getDisplay(), observationCodeNormalizedId));
}
myObservationIndexedCodeableConceptSearchParamDao.save(codeableConceptField);
codeableConceptField = myObservationIndexedCodeableConceptSearchParamDao.findByCodeableConceptId(observationCodeNormalizedId);

indexedObservation.setObservationCode(codeableConceptField);
indexedObservation.setCodeNormalizedId(observationCodeNormalizedId);

@@ -22,6 +22,7 @@ package ca.uhn.fhir.jpa.dao.r4;

import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDao;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoObservation;
import ca.uhn.fhir.jpa.dao.BaseHapiFhirResourceDaoObservation;
import ca.uhn.fhir.jpa.dao.lastn.ObservationLastNIndexPersistR4Svc;
import ca.uhn.fhir.jpa.model.cross.IBasePersistedResource;
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
@@ -38,16 +39,15 @@ import org.springframework.beans.factory.annotation.Autowired;
import javax.servlet.http.HttpServletResponse;
import java.util.Date;

public class FhirResourceDaoObservationR4 extends BaseHapiFhirResourceDao<Observation> implements IFhirResourceDaoObservation<Observation> {
public class FhirResourceDaoObservationR4 extends BaseHapiFhirResourceDaoObservation<Observation> {

@Autowired
ObservationLastNIndexPersistR4Svc myObservationLastNIndexPersistR4Svc;

@Override
public IBundleProvider observationsLastN(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails, HttpServletResponse theServletResponse) {
if (!isPagingProviderDatabaseBacked(theRequestDetails)) {
theSearchParameterMap.setLoadSynchronous(true);
}

updateSearchParamsForLastn(theSearchParameterMap, theRequestDetails);

return mySearchCoordinatorSvc.registerSearch(this, theSearchParameterMap, getResourceName(), new CacheControlDirective().parse(theRequestDetails.getHeaders(Constants.HEADER_CACHE_CONTROL)), theRequestDetails);
}

@@ -129,7 +129,7 @@ public class BaseJpaResourceProviderObservationR4 extends JpaResourceProviderR4<
paramMap.setRevIncludes(theRevIncludes);
paramMap.setLastUpdated(theLastUpdated);
paramMap.setIncludes(theIncludes);
paramMap.setLastN(true);
/* paramMap.setLastN(true);
if (theSort == null) {
SortSpec effectiveDtm = new SortSpec("date").setOrder(SortOrderEnum.DESC);
SortSpec observationCode = new SortSpec("code").setOrder(SortOrderEnum.ASC).setChain(effectiveDtm);
@@ -138,7 +138,7 @@ public class BaseJpaResourceProviderObservationR4 extends JpaResourceProviderR4<
} else {
theSort = new SortSpec("subject").setChain(observationCode);
}
}
} */
paramMap.setSort(theSort);
paramMap.setCount(theCount);
paramMap.setSummaryMode(theSummaryMode);

@@ -1,188 +0,0 @@
package ca.uhn.fhir.jpa.search.lastn;
/*
import org.shadehapi.elasticsearch.action.DocWriteRequest;
import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.shadehapi.elasticsearch.action.admin.indices.get.GetIndexRequest;
import org.shadehapi.elasticsearch.action.bulk.BulkItemResponse;
import org.shadehapi.elasticsearch.action.bulk.BulkRequest;
import org.shadehapi.elasticsearch.action.bulk.BulkResponse;
import org.shadehapi.elasticsearch.action.index.IndexRequest;
import org.shadehapi.elasticsearch.client.RequestOptions;
import org.shadehapi.elasticsearch.client.RestHighLevelClient;
import org.shadehapi.elasticsearch.common.xcontent.XContentType;
*/
import java.io.IOException;

public class ElasticsearchBulkIndexSvcImpl {

// RestHighLevelClient myRestHighLevelClient;

// BulkRequest myBulkRequest = null;

public ElasticsearchBulkIndexSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) {

// myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername,thePassword);

try {
createObservationIndexIfMissing();
createCodeIndexIfMissing();
} catch (IOException theE) {
throw new RuntimeException("Failed to create document index", theE);
}
}

public void createObservationIndexIfMissing() throws IOException {
if(indexExists(IndexConstants.OBSERVATION_INDEX)) {
return;
}
String observationMapping = "{\n" +
" \"mappings\" : {\n" +
" \"ca.uhn.fhir.jpa.dao.lastn.entity.ObservationIndexedSearchParamLastNEntity\" : {\n" +
" \"properties\" : {\n" +
" \"codeconceptid\" : {\n" +
" \"type\" : \"keyword\",\n" +
" \"norms\" : true\n" +
" },\n" +
" \"codeconcepttext\" : {\n" +
" \"type\" : \"text\"\n" +
" },\n" +
" \"codeconceptcodingcode\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"codeconceptcodingsystem\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"codeconceptcodingcode_system_hash\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"codeconceptcodingdisplay\" : {\n" +
" \"type\" : \"text\"\n" +
" },\n" +
" \"categoryconcepttext\" : {\n" +
" \"type\" : \"text\"\n" +
" },\n" +
" \"categoryconceptcodingcode\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"categoryconceptcodingsystem\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"categoryconceptcodingcode_system_hash\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"categoryconceptcodingdisplay\" : {\n" +
" \"type\" : \"text\"\n" +
" },\n" +
" \"effectivedtm\" : {\n" +
" \"type\" : \"date\"\n" +
" },\n" +
" \"identifier\" : {\n" +
" \"type\" : \"keyword\",\n" +
" \"store\" : true\n" +
" },\n" +
" \"subject\" : {\n" +
" \"type\" : \"keyword\"\n" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
"}\n";
if(!createIndex(IndexConstants.OBSERVATION_INDEX, observationMapping)) {
throw new RuntimeException("Failed to create observation index");
}

}

public void createCodeIndexIfMissing() throws IOException {
if(indexExists(IndexConstants.CODE_INDEX)) {
return;
}
String codeMapping = "{\n" +
" \"mappings\" : {\n" +
" \"ca.uhn.fhir.jpa.dao.lastn.entity.ObservationIndexedCodeCodeableConceptEntity\" : {\n" +
" \"properties\" : {\n" +
" \"codeable_concept_id\" : {\n" +
" \"type\" : \"keyword\",\n" +
" \"store\" : true\n" +
" },\n" +
" \"codeable_concept_text\" : {\n" +
" \"type\" : \"text\"\n" +
" },\n" +
" \"codingcode\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"codingcode_system_hash\" : {\n" +
" \"type\" : \"keyword\"\n" +
" },\n" +
" \"codingdisplay\" : {\n" +
" \"type\" : \"text\"\n" +
" },\n" +
" \"codingsystem\" : {\n" +
" \"type\" : \"keyword\"\n" +
" }\n" +
" }\n" +
" }\n" +
" }\n" +
"}\n";
if (!createIndex(IndexConstants.CODE_INDEX, codeMapping)) {
throw new RuntimeException("Failed to create code index");
}

}

public boolean createIndex(String theIndexName, String theMapping) throws IOException {
/* CreateIndexRequest request = new CreateIndexRequest(theIndexName);
request.source(theMapping, XContentType.JSON);
CreateIndexResponse createIndexResponse = myRestHighLevelClient.indices().create(request, RequestOptions.DEFAULT);
return createIndexResponse.isAcknowledged();
*/
return false;
}

public boolean indexExists(String theIndexName) throws IOException {
/* GetIndexRequest request = new GetIndexRequest();
request.indices(theIndexName);
return myRestHighLevelClient.indices().exists(request, RequestOptions.DEFAULT);

*/
return false;
}

public void addToBulkIndexRequest(String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) {
/* IndexRequest request = new IndexRequest(theIndexName);
request.id(theDocumentId);
request.type(theDocumentType);

request.source(theObservationDocument, XContentType.JSON);

if (myBulkRequest == null) {
myBulkRequest = new BulkRequest();
}
myBulkRequest.add(request); */
}

public void executeBulkIndex() throws IOException {
/* if (myBulkRequest == null) {
throw new RuntimeException(("No index requests have been added to the bulk request"));
}
BulkResponse bulkResponse = myRestHighLevelClient.bulk(myBulkRequest, RequestOptions.DEFAULT);
for (BulkItemResponse bulkItemResponse : bulkResponse) {
if (bulkItemResponse.getOpType() != DocWriteRequest.OpType.CREATE && bulkItemResponse.getOpType() != DocWriteRequest.OpType.INDEX) {
throw new RuntimeException("Unexpected response for bulk index request: " + bulkItemResponse.getOpType());
}
}
myBulkRequest = null; */
}

public boolean bulkRequestPending() {
// return (myBulkRequest != null);
return false;
}

public void closeClient() throws IOException {
// myRestHighLevelClient.close();
}


}

@@ -1,20 +1,16 @@
package ca.uhn.fhir.jpa.search.lastn;

import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.api.server.RequestDetails;
import ca.uhn.fhir.rest.param.NumberParam;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import ca.uhn.fhir.jpa.search.lastn.util.CodeSystemHash;
import ca.uhn.fhir.rest.server.RestfulServerUtils;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.VisibleForTesting;
import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexRequest;
import org.shadehapi.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.shadehapi.elasticsearch.action.admin.indices.get.GetIndexRequest;
@@ -55,9 +51,12 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {

ObjectMapper objectMapper = new ObjectMapper();

private final String GROUP_BY_SUBJECT = "group_by_subject";
private final String GROUP_BY_CODE = "group_by_code";


public ElasticsearchSvcImpl(String theHostname, int thePort, String theUsername, String thePassword) {
myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername,thePassword);
myRestHighLevelClient = ElasticsearchRestClientFactory.createElasticsearchHighLevelRestClient(theHostname, thePort, theUsername, thePassword);

try {
createObservationIndexIfMissing();
@@ -68,7 +67,7 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}

private void createObservationIndexIfMissing() throws IOException {
if(indexExists(IndexConstants.OBSERVATION_INDEX)) {
if (indexExists(IndexConstants.OBSERVATION_INDEX)) {
return;
}
String observationMapping = "{\n" +
@@ -123,14 +122,14 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
" }\n" +
" }\n" +
"}\n";
if(!createIndex(IndexConstants.OBSERVATION_INDEX, observationMapping)) {
if (!createIndex(IndexConstants.OBSERVATION_INDEX, observationMapping)) {
throw new RuntimeException("Failed to create observation index");
}

}

private void createCodeIndexIfMissing() throws IOException {
if(indexExists(IndexConstants.CODE_INDEX)) {
if (indexExists(IndexConstants.CODE_INDEX)) {
return;
}
String codeMapping = "{\n" +
@@ -174,8 +173,9 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {

}

@VisibleForTesting
boolean performIndex(String theIndexName, String theDocumentId, String theIndexDocument, String theDocumentType) throws IOException {
IndexResponse indexResponse = myRestHighLevelClient.index(createIndexRequest(theIndexName, theDocumentId,theIndexDocument,theDocumentType),
IndexResponse indexResponse = myRestHighLevelClient.index(createIndexRequest(theIndexName, theDocumentId, theIndexDocument, theDocumentType),
RequestOptions.DEFAULT);

return (indexResponse.getResult() == DocWriteResponse.Result.CREATED) || (indexResponse.getResult() == DocWriteResponse.Result.UPDATED);
@@ -197,24 +197,36 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}

@Override
public List<ResourcePersistentId> executeLastN(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails, IdHelperService theIdHelperService) {
Integer myMaxObservationsPerCode = 1;
String[] maxCountParams = theRequestDetails.getParameters().get("max");
if (maxCountParams != null && maxCountParams.length > 0) {
myMaxObservationsPerCode = Integer.valueOf(maxCountParams[0]);
}
SearchRequest myLastNRequest = buildObservationCompositeSearchRequest(10000, theSearchParameterMap, myMaxObservationsPerCode);
SearchResponse lastnResponse = null;
public List<String> executeLastN(SearchParameterMap theSearchParameterMap, Integer theMaxObservationsPerCode) {
String[] topHitsInclude = {"identifier"};
SearchRequest myLastNRequest = buildObservationsSearchRequest(theSearchParameterMap, createCompositeAggregationBuilder(theMaxObservationsPerCode, topHitsInclude));
try {
lastnResponse = executeSearchRequest(myLastNRequest);
List<ResourcePersistentId> observationIds = buildObservationIdList(lastnResponse, theIdHelperService);
return observationIds;
SearchResponse lastnResponse = executeSearchRequest(myLastNRequest);
return buildObservationIdList(lastnResponse);
} catch (IOException theE) {
throw new InvalidRequestException("Unable to execute LastN request", theE);
}
}

@VisibleForTesting
List<ObservationJson> executeLastNWithAllFields(SearchParameterMap theSearchParameterMap, Integer theMaxObservationsPerCode) {
SearchRequest myLastNRequest = buildObservationsSearchRequest(theSearchParameterMap, createCompositeAggregationBuilder(theMaxObservationsPerCode, null));
try {
SearchResponse lastnResponse = executeSearchRequest(myLastNRequest);
return buildObservationDocumentList(lastnResponse);
} catch (IOException theE) {
throw new InvalidRequestException("Unable to execute LastN request", theE);
}
}

@VisibleForTesting
List<CodeJson> queryAllIndexedObservationCodes(int theMaxResultSetSize) throws IOException {
SearchRequest codeSearchRequest = buildObservationCodesSearchRequest(theMaxResultSetSize);
SearchResponse codeSearchResponse = executeSearchRequest(codeSearchRequest);
return buildCodeResult(codeSearchResponse);
}

@VisibleForTesting
SearchRequest buildObservationCodesSearchRequest(int theMaxResultSetSize) {
SearchRequest searchRequest = new SearchRequest(IndexConstants.CODE_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
@@ -225,110 +237,75 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
return searchRequest;
}

private CompositeAggregationBuilder createCompositeAggregationBuilder(int theMaximumResultSetSize, int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
TermsAggregationBuilder observationCodeAggregationBuilder = new TermsAggregationBuilder("group_by_code", ValueType.STRING).field("codeconceptid");
private CompositeAggregationBuilder createCompositeAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
TermsAggregationBuilder observationCodeAggregationBuilder = new TermsAggregationBuilder(GROUP_BY_CODE, ValueType.STRING).field("codeconceptid");
// Top Hits Aggregation
observationCodeAggregationBuilder.subAggregation(AggregationBuilders.topHits("most_recent_effective")
.sort("effectivedtm", SortOrder.DESC)
.fetchSource(theTopHitsInclude, null).size(theMaxNumberObservationsPerCode));
observationCodeAggregationBuilder.size(theMaximumResultSetSize);
observationCodeAggregationBuilder.size(10000);
CompositeValuesSourceBuilder<?> subjectValuesBuilder = new TermsValuesSourceBuilder("subject").field("subject");
List<CompositeValuesSourceBuilder<?>> compositeAggSubjectSources = new ArrayList();
compositeAggSubjectSources.add(subjectValuesBuilder);
CompositeAggregationBuilder compositeAggregationSubjectBuilder = new CompositeAggregationBuilder("group_by_subject", compositeAggSubjectSources);
CompositeAggregationBuilder compositeAggregationSubjectBuilder = new CompositeAggregationBuilder(GROUP_BY_SUBJECT, compositeAggSubjectSources);
compositeAggregationSubjectBuilder.subAggregation(observationCodeAggregationBuilder);
compositeAggregationSubjectBuilder.size(theMaximumResultSetSize);
compositeAggregationSubjectBuilder.size(10000);

return compositeAggregationSubjectBuilder;
}

private TermsAggregationBuilder createTermsAggregationBuilder(int theMaximumResultSetSize, int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
TermsAggregationBuilder observationCodeAggregationBuilder = new TermsAggregationBuilder("group_by_code", ValueType.STRING).field("codeconceptid");
// Top Hits Aggregation
observationCodeAggregationBuilder.subAggregation(AggregationBuilders.topHits("most_recent_effective")
.sort("effectivedtm", SortOrder.DESC).fetchSource(theTopHitsInclude, null).size(theMaxNumberObservationsPerCode));
observationCodeAggregationBuilder.size(theMaximumResultSetSize);
TermsAggregationBuilder subjectsBuilder = new TermsAggregationBuilder("group_by_subject", ValueType.STRING).field("subject");
subjectsBuilder.subAggregation(observationCodeAggregationBuilder);
subjectsBuilder.size(theMaximumResultSetSize);
return subjectsBuilder;
}

public SearchResponse executeSearchRequest(SearchRequest searchRequest) throws IOException {
private SearchResponse executeSearchRequest(SearchRequest searchRequest) throws IOException {
return myRestHighLevelClient.search(searchRequest, RequestOptions.DEFAULT);
}

public List<ObservationJson> buildObservationCompositeResults(SearchResponse theSearchResponse) throws IOException {

private List<String> buildObservationIdList(SearchResponse theSearchResponse) throws IOException {
List<String> theObservationList = new ArrayList<>();
for (ParsedComposite.ParsedBucket subjectBucket : getSubjectBuckets(theSearchResponse)) {
for (Terms.Bucket observationCodeBucket : getObservationCodeBuckets(subjectBucket)) {
for (SearchHit lastNMatch : getLastNMatches(observationCodeBucket)) {
String indexedObservation = lastNMatch.getSourceAsString();
ObservationJson observationJson = objectMapper.readValue(indexedObservation, ObservationJson.class);
theObservationList.add(observationJson.getIdentifier());
}
}
}
return theObservationList;
}

private List<ObservationJson> buildObservationDocumentList(SearchResponse theSearchResponse) throws IOException {
List<ObservationJson> theObservationList = new ArrayList<>();
for (ParsedComposite.ParsedBucket subjectBucket : getSubjectBuckets(theSearchResponse)) {
for (Terms.Bucket observationCodeBucket : getObservationCodeBuckets(subjectBucket)) {
for (SearchHit lastNMatch : getLastNMatches(observationCodeBucket)) {
String indexedObservation = lastNMatch.getSourceAsString();
ObservationJson observationJson = objectMapper.readValue(indexedObservation, ObservationJson.class);
theObservationList.add(observationJson);
}
}
}
return theObservationList;
}

private List<ParsedComposite.ParsedBucket> getSubjectBuckets(SearchResponse theSearchResponse) {
Aggregations responseAggregations = theSearchResponse.getAggregations();
ParsedComposite aggregatedSubjects = responseAggregations.get("group_by_subject");
List<ParsedComposite.ParsedBucket> subjectBuckets = aggregatedSubjects.getBuckets();
List<ObservationJson> codes = new ArrayList<>();
for(ParsedComposite.ParsedBucket subjectBucket : subjectBuckets) {
Aggregations observationCodeAggregations = subjectBucket.getAggregations();
ParsedTerms aggregatedObservationCodes = observationCodeAggregations.get("group_by_code");
List<? extends Terms.Bucket> observationCodeBuckets = aggregatedObservationCodes.getBuckets();
for (Terms.Bucket observationCodeBucket : observationCodeBuckets) {
Aggregations topHitObservationCodes = observationCodeBucket.getAggregations();
ParsedComposite aggregatedSubjects = responseAggregations.get(GROUP_BY_SUBJECT);
return aggregatedSubjects.getBuckets();
}

private List<? extends Terms.Bucket> getObservationCodeBuckets(ParsedComposite.ParsedBucket theSubjectBucket) {
Aggregations observationCodeAggregations = theSubjectBucket.getAggregations();
ParsedTerms aggregatedObservationCodes = observationCodeAggregations.get(GROUP_BY_CODE);
return aggregatedObservationCodes.getBuckets();
}

private SearchHit[] getLastNMatches(Terms.Bucket theObservationCodeBucket) {
Aggregations topHitObservationCodes = theObservationCodeBucket.getAggregations();
ParsedTopHits parsedTopHits = topHitObservationCodes.get("most_recent_effective");
SearchHit[] topHits = parsedTopHits.getHits().getHits();
for (SearchHit topHit : topHits) {
String sources = topHit.getSourceAsString();
ObservationJson code = objectMapper.readValue(sources, ObservationJson.class);
codes.add(code);
}
}
}
return codes;
return parsedTopHits.getHits().getHits();
}

private List<ResourcePersistentId> buildObservationIdList(SearchResponse theSearchResponse, IdHelperService theIdHelperService) throws IOException {
Aggregations responseAggregations = theSearchResponse.getAggregations();
ParsedComposite aggregatedSubjects = responseAggregations.get("group_by_subject");
List<ParsedComposite.ParsedBucket> subjectBuckets = aggregatedSubjects.getBuckets();
List<ResourcePersistentId> myObservationIds = new ArrayList<>();
for(ParsedComposite.ParsedBucket subjectBucket : subjectBuckets) {
Aggregations observationCodeAggregations = subjectBucket.getAggregations();
ParsedTerms aggregatedObservationCodes = observationCodeAggregations.get("group_by_code");
List<? extends Terms.Bucket> observationCodeBuckets = aggregatedObservationCodes.getBuckets();
for (Terms.Bucket observationCodeBucket : observationCodeBuckets) {
Aggregations topHitObservationCodes = observationCodeBucket.getAggregations();
ParsedTopHits parsedTopHits = topHitObservationCodes.get("most_recent_effective");
SearchHit[] topHits = parsedTopHits.getHits().getHits();
for (SearchHit topHit : topHits) {
String sources = topHit.getSourceAsString();
ObservationJson code = objectMapper.readValue(sources, ObservationJson.class);

myObservationIds.add(theIdHelperService.resolveResourcePersistentIds("Observation", code.getIdentifier()));
}
}
}
return myObservationIds;
}

public List<ObservationJson> buildObservationTermsResults(SearchResponse theSearchResponse) throws IOException {
Aggregations responseAggregations = theSearchResponse.getAggregations();
ParsedTerms aggregatedSubjects = responseAggregations.get("group_by_subject");
List<? extends Terms.Bucket> subjectBuckets = aggregatedSubjects.getBuckets();
List<ObservationJson> codes = new ArrayList<>();
for (Terms.Bucket subjectBucket : subjectBuckets) {
Aggregations observationCodeAggregations = subjectBucket.getAggregations();
ParsedTerms aggregatedObservationCodes = observationCodeAggregations.get("group_by_code");
List<? extends Terms.Bucket> observationCodeBuckets = aggregatedObservationCodes.getBuckets();
for (Terms.Bucket observationCodeBucket : observationCodeBuckets) {
Aggregations topHitObservationCodes = observationCodeBucket.getAggregations();
ParsedTopHits parsedTopHits = topHitObservationCodes.get("most_recent_effective");
SearchHit[] topHits = parsedTopHits.getHits().getHits();
for (SearchHit topHit : topHits) {
String sources = topHit.getSourceAsString();
ObservationJson code = objectMapper.readValue(sources,ObservationJson.class);
codes.add(code);
}
}
}
return codes;
}

public List<CodeJson> buildCodeResult(SearchResponse theSearchResponse) throws JsonProcessingException {
private List<CodeJson> buildCodeResult(SearchResponse theSearchResponse) throws JsonProcessingException {
SearchHits codeHits = theSearchResponse.getHits();
List<CodeJson> codes = new ArrayList<>();
for (SearchHit codeHit : codeHits) {
@@ -338,37 +315,16 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
return codes;
}

public SearchRequest buildObservationAllFieldsCompositeSearchRequest(int maxResultSetSize, SearchParameterMap theSearchParameterMap, int theMaxObservationsPerCode) {

return buildObservationsSearchRequest(theSearchParameterMap, createCompositeAggregationBuilder(maxResultSetSize, theMaxObservationsPerCode, null));
}

public SearchRequest buildObservationCompositeSearchRequest(int maxResultSetSize, SearchParameterMap theSearchParameterMap, int theMaxObservationsPerCode) {
// Return only identifiers
String[] topHitsInclude = {"identifier","subject","effectivedtm","codeconceptid"};
return buildObservationsSearchRequest(theSearchParameterMap, createCompositeAggregationBuilder(maxResultSetSize, theMaxObservationsPerCode, topHitsInclude));
}

public SearchRequest buildObservationAllFieldsTermsSearchRequest(int maxResultSetSize, SearchParameterMap theSearchParameterMap, int theMaxObservationsPerCode) {
return buildObservationsSearchRequest(theSearchParameterMap, createTermsAggregationBuilder(maxResultSetSize, theMaxObservationsPerCode, null));
}

public SearchRequest buildObservationTermsSearchRequest(int maxResultSetSize, SearchParameterMap theSearchParameterMap, int theMaxObservationsPerCode) {
// Return only identifiers
String[] topHitsInclude = {"identifier","subject","effectivedtm","codeconceptid"};
return buildObservationsSearchRequest(theSearchParameterMap, createTermsAggregationBuilder(maxResultSetSize, theMaxObservationsPerCode, topHitsInclude));
}

private SearchRequest buildObservationsSearchRequest(SearchParameterMap theSearchParameterMap, AggregationBuilder theAggregationBuilder) {
SearchRequest searchRequest = new SearchRequest(IndexConstants.OBSERVATION_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
// Query
if(!searchParamsHaveLastNCriteria(theSearchParameterMap)) {
if (!searchParamsHaveLastNCriteria(theSearchParameterMap)) {
searchSourceBuilder.query(QueryBuilders.matchAllQuery());
} else {
BoolQueryBuilder boolQueryBuilder = QueryBuilders.boolQuery();
addSubjectsCriteria(boolQueryBuilder,theSearchParameterMap);
addCategoriesCriteria(boolQueryBuilder,theSearchParameterMap);
addSubjectsCriteria(boolQueryBuilder, theSearchParameterMap);
addCategoriesCriteria(boolQueryBuilder, theSearchParameterMap);
addObservationCodeCriteria(boolQueryBuilder, theSearchParameterMap);
searchSourceBuilder.query(boolQueryBuilder);
}
@@ -383,19 +339,19 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {

private Boolean searchParamsHaveLastNCriteria(SearchParameterMap theSearchParameterMap) {
return theSearchParameterMap != null &&
(theSearchParameterMap.containsKey("patient") || theSearchParameterMap.containsKey("subject") ||
theSearchParameterMap.containsKey("category") || theSearchParameterMap.containsKey("code"));
(theSearchParameterMap.containsKey(IndexConstants.PATIENT_SEARCH_PARAM) || theSearchParameterMap.containsKey(IndexConstants.SUBJECT_SEARCH_PARAM) ||
theSearchParameterMap.containsKey(IndexConstants.CATEGORY_SEARCH_PARAM) || theSearchParameterMap.containsKey(IndexConstants.CODE_SEARCH_PARAM));
}

private void addSubjectsCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap) {
if (theSearchParameterMap.containsKey("patient") || theSearchParameterMap.containsKey("subject")) {
if (theSearchParameterMap.containsKey(IndexConstants.PATIENT_SEARCH_PARAM) || theSearchParameterMap.containsKey(IndexConstants.SUBJECT_SEARCH_PARAM)) {
ArrayList<String> subjectReferenceCriteria = new ArrayList<>();
List<List<IQueryParameterType>> andOrParams = new ArrayList<>();
if (theSearchParameterMap.get("patient") != null) {
andOrParams.addAll(theSearchParameterMap.get("patient"));
if (theSearchParameterMap.get(IndexConstants.PATIENT_SEARCH_PARAM) != null) {
andOrParams.addAll(theSearchParameterMap.get(IndexConstants.PATIENT_SEARCH_PARAM));
}
if (theSearchParameterMap.get("subject") != null) {
andOrParams.addAll(theSearchParameterMap.get("subject"));
if (theSearchParameterMap.get(IndexConstants.SUBJECT_SEARCH_PARAM) != null) {
andOrParams.addAll(theSearchParameterMap.get(IndexConstants.SUBJECT_SEARCH_PARAM));
}
for (List<? extends IQueryParameterType> nextAnd : andOrParams) {
subjectReferenceCriteria.addAll(getReferenceValues(nextAnd));
@@ -425,12 +381,12 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}

private void addCategoriesCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap) {
if (theSearchParameterMap.containsKey("category")) {
if (theSearchParameterMap.containsKey(IndexConstants.CATEGORY_SEARCH_PARAM)) {
ArrayList<String> codeSystemHashList = new ArrayList<>();
ArrayList<String> codeOnlyList = new ArrayList<>();
ArrayList<String> systemOnlyList = new ArrayList<>();
ArrayList<String> textOnlyList = new ArrayList<>();
List<List<IQueryParameterType>> andOrParams = theSearchParameterMap.get("category");
List<List<IQueryParameterType>> andOrParams = theSearchParameterMap.get(IndexConstants.CATEGORY_SEARCH_PARAM);
for (List<? extends IQueryParameterType> nextAnd : andOrParams) {
codeSystemHashList.addAll(getCodingCodeSystemValues(nextAnd));
codeOnlyList.addAll(getCodingCodeOnlyValues(nextAnd));
@@ -460,7 +416,7 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {

private List<String> getCodingCodeSystemValues(List<? extends IQueryParameterType> codeParams) {
ArrayList<String> codeSystemHashList = new ArrayList<>();
for (IQueryParameterType nextOr : codeParams ) {
for (IQueryParameterType nextOr : codeParams) {
if (nextOr instanceof TokenParam) {
TokenParam ref = (TokenParam) nextOr;
if (ref.getSystem() != null && ref.getValue() != null) {
@@ -473,7 +429,7 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
return codeSystemHashList;
}

private List<String> getCodingCodeOnlyValues(List<? extends IQueryParameterType> codeParams ) {
private List<String> getCodingCodeOnlyValues(List<? extends IQueryParameterType> codeParams) {
ArrayList<String> codeOnlyList = new ArrayList<>();
for (IQueryParameterType nextOr : codeParams) {

@@ -489,7 +445,7 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
return codeOnlyList;
}

private List<String> getCodingSystemOnlyValues(List<? extends IQueryParameterType> codeParams ) {
private List<String> getCodingSystemOnlyValues(List<? extends IQueryParameterType> codeParams) {
ArrayList<String> systemOnlyList = new ArrayList<>();
for (IQueryParameterType nextOr : codeParams) {

@@ -505,9 +461,9 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
return systemOnlyList;
}

private List<String> getCodingTextOnlyValues(List<? extends IQueryParameterType> codeParams ) {
private List<String> getCodingTextOnlyValues(List<? extends IQueryParameterType> codeParams) {
ArrayList<String> textOnlyList = new ArrayList<>();
for (IQueryParameterType nextOr : codeParams ) {
for (IQueryParameterType nextOr : codeParams) {

if (nextOr instanceof TokenParam) {
TokenParam ref = (TokenParam) nextOr;
@@ -522,12 +478,12 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}

private void addObservationCodeCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap) {
if (theSearchParameterMap.containsKey("code")) {
if (theSearchParameterMap.containsKey(IndexConstants.CODE_SEARCH_PARAM)) {
ArrayList<String> codeSystemHashList = new ArrayList<>();
ArrayList<String> codeOnlyList = new ArrayList<>();
ArrayList<String> systemOnlyList = new ArrayList<>();
ArrayList<String> textOnlyList = new ArrayList<>();
List<List<IQueryParameterType>> andOrParams = theSearchParameterMap.get("code");
List<List<IQueryParameterType>> andOrParams = theSearchParameterMap.get(IndexConstants.CODE_SEARCH_PARAM);
for (List<? extends IQueryParameterType> nextAnd : andOrParams) {
codeSystemHashList.addAll(getCodingCodeSystemValues(nextAnd));
codeOnlyList.addAll(getCodingCodeOnlyValues(nextAnd));
@@ -555,22 +511,11 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {

}

@VisibleForTesting
void deleteAllDocuments(String theIndexName) throws IOException {
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(theIndexName);
deleteByQueryRequest.setQuery(QueryBuilders.matchAllQuery());
myRestHighLevelClient.deleteByQuery(deleteByQueryRequest, RequestOptions.DEFAULT);
}

/* @Override
public IBundleProvider lastN(HttpServletRequest theServletRequest, RequestDetails theRequestDetails) {

return null;
}

@Override
public IBundleProvider uniqueCodes(HttpServletRequest theServletRequest, RequestDetails theRequestDetails) {
return null;
}

*/
}

@@ -1,20 +1,10 @@
package ca.uhn.fhir.jpa.search.lastn;

//import ca.uhn.fhir.rest.api.server.IBundleProvider;

import ca.uhn.fhir.jpa.dao.index.IdHelperService;
import ca.uhn.fhir.jpa.model.cross.ResourcePersistentId;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.api.server.RequestDetails;

import java.util.List;

public interface IElasticsearchSvc {


// IBundleProvider lastN(javax.servlet.http.HttpServletRequest theServletRequest, ca.uhn.fhir.rest.api.server.RequestDetails theRequestDetails);

// IBundleProvider uniqueCodes(javax.servlet.http.HttpServletRequest theServletRequest, ca.uhn.fhir.rest.api.server.RequestDetails theRequestDetails);

List<ResourcePersistentId> executeLastN(SearchParameterMap theSearchParameterMap, RequestDetails theRequestDetails, IdHelperService theIdHelperService);
List<String> executeLastN(SearchParameterMap theSearchParameterMap, Integer theMaxObservationsPerCode);
}

@@ -6,4 +6,10 @@ public class IndexConstants {
public static final String CODE_INDEX = "code_index";
public static final String OBSERVATION_DOCUMENT_TYPE = "ca.uhn.fhir.jpa.dao.lastn.entity.ObservationIndexedSearchParamLastNEntity";
public static final String CODE_DOCUMENT_TYPE = "ca.uhn.fhir.jpa.dao.lastn.entity.ObservationIndexedCodeCodeableConceptEntity";

public static final String SUBJECT_SEARCH_PARAM = "subject";
public static final String PATIENT_SEARCH_PARAM = "patient";
public static final String CODE_SEARCH_PARAM = "code";
public static final String CATEGORY_SEARCH_PARAM = "category";

}

@@ -1,122 +0,0 @@
package ca.uhn.fhir.jpa.search.lastn.cli;

import ca.uhn.fhir.jpa.search.lastn.json.IdJson;
import ca.uhn.fhir.jpa.search.lastn.json.IndexJson;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.UUID;

public class MulticodeObservationsIntoElasticSearch {
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(MulticodeObservationsIntoElasticSearch.class);

private static final ObjectMapper ourMapperNonPrettyPrint;

static {
ourMapperNonPrettyPrint = new ObjectMapper();
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
ourMapperNonPrettyPrint.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
}

public static void main(String[] theArgs) {

for (int patientCount = 0; patientCount < 10 ; patientCount++) {

String subject = "Patient/"+UUID.randomUUID().toString();

for ( int entryCount = 0; entryCount < 10 ; entryCount++ ) {
String nextResourceId = UUID.randomUUID().toString();

IdJson id = new IdJson(nextResourceId);
IndexJson documentIndex = new IndexJson(id);

ObservationJson observationDocument = new ObservationJson();
observationDocument.setIdentifier(nextResourceId);
observationDocument.setSubject(subject);
// Add three CodeableConcepts for category
List<CodeableConcept> category = new ArrayList<>();
// Create three codings and first category CodeableConcept
Coding categoryCoding1_1 = new Coding("http://mycodes.org/fhir/observation-category", "test-heart-rate", "test heart-rate");
Coding categoryCoding1_2 = new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-heart-rate", "test alternate heart-rate");
Coding categoryCoding1_3 = new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-heart-rate", "test second alternate heart-rate");
CodeableConcept categoryCodeableConcept1 = new CodeableConcept();
categoryCodeableConcept1.getCoding().add(categoryCoding1_1);
categoryCodeableConcept1.getCoding().add(categoryCoding1_2);
categoryCodeableConcept1.getCoding().add(categoryCoding1_3);
categoryCodeableConcept1.setText("Heart Rate Codeable Concept");
category.add(categoryCodeableConcept1);
// Create three codings and second category CodeableConcept
Coding categoryCoding2_1 = new Coding("http://mycodes.org/fhir/observation-category", "test-vital-signs", "test vital signs");
Coding categoryCoding2_2 = new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals", "test alternate vital signs");
Coding categoryCoding2_3 = new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-vitals", "test second alternate vital signs");
CodeableConcept categoryCodeableConcept2 = new CodeableConcept();
categoryCodeableConcept2.getCoding().add(categoryCoding2_1);
categoryCodeableConcept2.getCoding().add(categoryCoding2_2);
categoryCodeableConcept2.getCoding().add(categoryCoding2_3);
categoryCodeableConcept2.setText("Vital Signs Codeable Concept");
category.add(categoryCodeableConcept2);
// Create three codings and third category CodeableConcept
Coding categoryCoding3_1 = new Coding("http://mycodes.org/fhir/observation-category", "test-vitals-panel", "test vital signs panel");
Coding categoryCoding3_2 = new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals-panel", "test alternate vital signs panel");
Coding categoryCoding3_3 = new Coding("http://mysecondaltcodes.org/fhir/observation-category", "test-2nd-alt-vitals-panel", "test second alternate vital signs panel");
CodeableConcept categoryCodeableConcept3 = new CodeableConcept();
categoryCodeableConcept3.getCoding().add(categoryCoding3_1);
categoryCodeableConcept3.getCoding().add(categoryCoding3_2);
categoryCodeableConcept3.getCoding().add(categoryCoding3_3);
categoryCodeableConcept3.setText("Vital Signs Panel Codeable Concept");
category.add(categoryCodeableConcept3);
observationDocument.setCategories(category);

Coding codeCoding1 = new Coding("http://mycodes.org/fhir/observation-code", "test-code", "test observation code");
Coding codeCoding2 = new Coding("http://myalternatecodes.org/fhir/observation-code", "test-alt-code", "test observation code");
Coding codeCoding3 = new Coding("http://mysecondaltcodes.org/fhir/observation-code", "test-second-alt-code", "test observation code");
CodeableConcept code = new CodeableConcept();
code.getCoding().add(codeCoding1);
code.getCoding().add(codeCoding2);
|
||||
code.getCoding().add(codeCoding3);
|
||||
code.setText("Observation code CodeableConcept");
|
||||
observationDocument.setCode(code);
|
||||
observationDocument.setCode_concept_id("multicode_test_normalized_code");
|
||||
|
||||
Date effectiveDtm = new Date();
|
||||
observationDocument.setEffectiveDtm(effectiveDtm);
|
||||
|
||||
StringWriter stringWriter = new StringWriter();
|
||||
|
||||
File outputFile = new File("Observations_multiplecodes.json");
|
||||
try {
|
||||
FileOutputStream outputStream = new FileOutputStream(outputFile, true);
|
||||
ourMapperNonPrettyPrint.writeValue(stringWriter, documentIndex);
|
||||
stringWriter.append('\n');
|
||||
ourMapperNonPrettyPrint.writeValue(stringWriter, observationDocument);
|
||||
ourMapperNonPrettyPrint.writeValue(outputStream, documentIndex);
|
||||
outputStream.write('\n');
|
||||
ourMapperNonPrettyPrint.writeValue(outputStream, observationDocument);
|
||||
outputStream.write('\n');
|
||||
outputStream.flush();
|
||||
outputStream.close();
|
||||
} catch (IOException theE) {
|
||||
theE.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
ourLog.info("Upload complete");
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,128 +0,0 @@
package ca.uhn.fhir.jpa.search.lastn.cli;
|
||||
|
||||
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchBulkIndexSvcImpl;
|
||||
import ca.uhn.fhir.jpa.search.lastn.IndexConstants;
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
|
||||
import ca.uhn.fhir.jpa.search.lastn.util.SimpleStopWatch;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||
import org.apache.commons.lang3.RandomStringUtils;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.*;
|
||||
|
||||
public class OneMillionPatientsIntoElasticSearch {
|
||||
// private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(OneMillionPatientsIntoElasticSearch.class);
|
||||
|
||||
private static final ObjectMapper ourMapperNonPrettyPrint;
|
||||
|
||||
static {
|
||||
ourMapperNonPrettyPrint = new ObjectMapper();
|
||||
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
|
||||
ourMapperNonPrettyPrint.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
|
||||
}
|
||||
|
||||
public static void main(String[] theArgs) throws IOException {
|
||||
|
||||
ElasticsearchBulkIndexSvcImpl elasticsearchSvc = new ElasticsearchBulkIndexSvcImpl("localhost",9301, "elastic", "changeme");
|
||||
try {
|
||||
|
||||
SimpleStopWatch stopwatch = new SimpleStopWatch();
|
||||
|
||||
List<String> observationCodeIds = new ArrayList<>();
|
||||
List<CodeableConcept> observationCodes = new ArrayList<>();
|
||||
|
||||
for (int codeCount = 0; codeCount < 1000; codeCount++) {
|
||||
String code = RandomStringUtils.random(10,true,true);
|
||||
Coding codeCoding = new Coding("http://mycodes.org/fhir/observation-code", code, "test observation code " + code);
|
||||
CodeableConcept codeConcept = new CodeableConcept();
|
||||
codeConcept.getCoding().add(codeCoding);
|
||||
codeConcept.setText("test observation code concept " + code);
|
||||
String codeableConceptId = UUID.randomUUID().toString();
|
||||
observationCodeIds.add(codeableConceptId);
|
||||
observationCodes.add(codeConcept);
|
||||
CodeJson codeDocument = new CodeJson(codeConcept, codeableConceptId);
|
||||
String printedObservationDocument = ourMapperNonPrettyPrint.writeValueAsString(codeDocument);
|
||||
System.out.println(printedObservationDocument);
|
||||
System.out.println(codeableConceptId);
|
||||
elasticsearchSvc.addToBulkIndexRequest("code_index", codeableConceptId, printedObservationDocument, IndexConstants.CODE_DOCUMENT_TYPE);
|
||||
if ((codeCount+1)%250 == 0) {
|
||||
elasticsearchSvc.executeBulkIndex();
|
||||
long elapsedTime = stopwatch.getElapsedTime();
|
||||
stopwatch.restart();
|
||||
System.out.println("Elapsed processing time = " + elapsedTime/1000 + "s");
|
||||
System.out.println("Average processing time/code = " + elapsedTime/5000 + "ms");
|
||||
}
|
||||
}
|
||||
|
||||
for (int patientCount = 0; patientCount < 1000000 ; patientCount++) {
|
||||
String subject = "Patient/"+UUID.randomUUID().toString();
|
||||
ArrayList<CodeableConcept> observationCodesSubSet = new ArrayList<>();
|
||||
ArrayList<String> observationCodeIdsSubSet = new ArrayList<>();
|
||||
for (int observationCount = 0; observationCount < 15 ; observationCount++) {
|
||||
int codeIndex = (int) (1000 * Math.random());
|
||||
observationCodesSubSet.add(observationCodes.get(codeIndex));
|
||||
observationCodeIdsSubSet.add(observationCodeIds.get(codeIndex));
|
||||
}
|
||||
int repeatedCodeIndex = (int) (1000 * Math.random());
|
||||
CodeableConcept repeatedCoding = observationCodes.get(repeatedCodeIndex);
|
||||
for (int observationCount = 0; observationCount < 10 ; observationCount++ ) {
|
||||
observationCodesSubSet.add(repeatedCoding);
|
||||
observationCodeIdsSubSet.add(observationCodeIds.get(repeatedCodeIndex));
|
||||
}
|
||||
int entryCount = 0;
|
||||
for (int codingCount=0; codingCount<observationCodesSubSet.size(); codingCount++) {
|
||||
String nextResourceId = UUID.randomUUID().toString();
|
||||
ObservationJson observationDocument = new ObservationJson();
|
||||
observationDocument.setIdentifier(nextResourceId);
|
||||
observationDocument.setSubject(subject);
|
||||
List<CodeableConcept> category = new ArrayList<>();
|
||||
Coding categoryCoding = new Coding("http://mycodes.org/fhir/category-code", "test-category-code", "test category display");
|
||||
CodeableConcept categoryCodeableConcept = new CodeableConcept();
|
||||
categoryCodeableConcept.getCoding().add(categoryCoding);
|
||||
categoryCodeableConcept.setText("Test Category CodeableConcept Text");
|
||||
category.add(categoryCodeableConcept);
|
||||
observationDocument.setCategories(category);
|
||||
observationDocument.setCode(observationCodesSubSet.get(codingCount));
|
||||
observationDocument.setCode_concept_id(observationCodeIdsSubSet.get(codingCount));
|
||||
Calendar observationDate = new GregorianCalendar();
|
||||
observationDate.add(Calendar.HOUR, -10 + entryCount);
|
||||
entryCount++;
|
||||
Date effectiveDtm = observationDate.getTime();
|
||||
observationDocument.setEffectiveDtm(effectiveDtm);
|
||||
|
||||
String printedObservationDocument = ourMapperNonPrettyPrint.writeValueAsString(observationDocument);
|
||||
elasticsearchSvc.addToBulkIndexRequest("observation_index", nextResourceId, printedObservationDocument, IndexConstants.OBSERVATION_DOCUMENT_TYPE );
|
||||
}
|
||||
if ((patientCount+1)%100 == 0) {
|
||||
System.out.println("Entries created = " + (patientCount+1)*25);
|
||||
}
|
||||
if ((patientCount+1)%250 == 0) {
|
||||
elasticsearchSvc.executeBulkIndex();
|
||||
long elapsedTime = stopwatch.getElapsedTime();
|
||||
stopwatch.restart();
|
||||
System.out.println("Elapsed processing time = " + elapsedTime/1000 + "s");
|
||||
System.out.println("Average processing time/observation = " + elapsedTime/5000 + "ms");
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
if (elasticsearchSvc.bulkRequestPending()) {
|
||||
elasticsearchSvc.executeBulkIndex();
|
||||
}
|
||||
|
||||
// ourLog.info("Upload complete");
|
||||
} finally {
|
||||
elasticsearchSvc.closeClient();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,116 +0,0 @@
package ca.uhn.fhir.jpa.search.lastn.cli;
|
||||
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.IdJson;
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.IndexJson;
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.StringWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class OneThousandObservationsIntoElasticSearch {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(OneThousandObservationsIntoElasticSearch.class);
|
||||
|
||||
private static final ObjectMapper ourMapperNonPrettyPrint;
|
||||
|
||||
static {
|
||||
ourMapperNonPrettyPrint = new ObjectMapper();
|
||||
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
|
||||
ourMapperNonPrettyPrint.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
|
||||
}
|
||||
|
||||
public static void main(String[] theArgs) {
|
||||
|
||||
for (int patientCount = 0; patientCount < 3 ; patientCount++) {
|
||||
|
||||
String subject = "Patient/"+UUID.randomUUID().toString();
|
||||
|
||||
for ( int entryCount = 0; entryCount < 1100 ; entryCount++ ) {
|
||||
String nextResourceId = UUID.randomUUID().toString();
|
||||
|
||||
IdJson id = new IdJson(nextResourceId);
|
||||
IndexJson documentIndex = new IndexJson(id);
|
||||
|
||||
ObservationJson observationDocument = new ObservationJson();
|
||||
observationDocument.setIdentifier(nextResourceId);
|
||||
observationDocument.setSubject(subject);
|
||||
// Add three CodeableConcepts for category
|
||||
List<CodeableConcept> category = new ArrayList<>();
|
||||
// Create three codings and first category CodeableConcept
|
||||
Coding categoryCoding1_1 = new Coding("http://mycodes.org/fhir/observation-category", "test-heart-rate", "test heart-rate");
|
||||
Coding categoryCoding1_2 = new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-heart-rate", "test alternate heart-rate");
|
||||
CodeableConcept categoryCodeableConcept1 = new CodeableConcept();
|
||||
categoryCodeableConcept1.getCoding().add(categoryCoding1_1);
|
||||
categoryCodeableConcept1.getCoding().add(categoryCoding1_2);
|
||||
categoryCodeableConcept1.setText("Heart Rate CodeableConcept");
|
||||
category.add(categoryCodeableConcept1);
|
||||
// Create three codings and second category CodeableConcept
|
||||
Coding categoryCoding2_1 = new Coding("http://mycodes.org/fhir/observation-category", "test-vital-signs", "test vital signs");
|
||||
Coding categoryCoding2_2 = new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals", "test alternate vital signs");
|
||||
CodeableConcept categoryCodeableConcept2 = new CodeableConcept();
|
||||
categoryCodeableConcept2.getCoding().add(categoryCoding2_1);
|
||||
categoryCodeableConcept2.getCoding().add(categoryCoding2_2);
|
||||
categoryCodeableConcept2.setText("Vital Signs CodeableConcept");
|
||||
category.add(categoryCodeableConcept2);
|
||||
// Create three codings and third category CodeableConcept
|
||||
Coding categoryCoding3_1 = new Coding("http://mycodes.org/fhir/observation-category", "test-vitals-panel", "test vital signs panel");
|
||||
Coding categoryCoding3_2 = new Coding("http://myalternatecodes.org/fhir/observation-category", "test-alt-vitals-panel", "test alternate vital signs panel");
|
||||
CodeableConcept categoryCodeableConcept3 = new CodeableConcept();
|
||||
categoryCodeableConcept3.getCoding().add(categoryCoding3_1);
|
||||
categoryCodeableConcept3.getCoding().add(categoryCoding3_2);
|
||||
categoryCodeableConcept3.setText("Vital Signs Panel CodeableConcept");
|
||||
category.add(categoryCodeableConcept3);
|
||||
observationDocument.setCategories(category);
|
||||
|
||||
Coding codeCoding1 = new Coding("http://mycodes.org/fhir/observation-code", "test-code_" + entryCount, "test observation code");
|
||||
Coding codeCoding2 = new Coding("http://myalternatecodes.org/fhir/observation-code", "test-alt-code_" + entryCount, "test observation code");
|
||||
Coding codeCoding3 = new Coding("http://mysecondaltcodes.org/fhir/observation-code", "test-second-alt-code" + entryCount, "test observation code");
|
||||
CodeableConcept code = new CodeableConcept();
|
||||
code.getCoding().add(codeCoding1);
|
||||
code.getCoding().add(codeCoding2);
|
||||
code.getCoding().add(codeCoding3);
|
||||
code.setText("Observation code CodeableConcept " + entryCount);
|
||||
observationDocument.setCode(code);
|
||||
observationDocument.setCode_concept_id("multicode_test_normalized_code_" + entryCount);
|
||||
|
||||
Date effectiveDtm = new Date();
|
||||
observationDocument.setEffectiveDtm(effectiveDtm);
|
||||
|
||||
StringWriter stringWriter = new StringWriter();
|
||||
|
||||
File outputFile = new File("one_thousand_observations.json");
|
||||
try {
|
||||
FileOutputStream outputStream = new FileOutputStream(outputFile, true);
|
||||
ourMapperNonPrettyPrint.writeValue(stringWriter, documentIndex);
|
||||
stringWriter.append('\n');
|
||||
ourMapperNonPrettyPrint.writeValue(stringWriter, observationDocument);
|
||||
ourMapperNonPrettyPrint.writeValue(outputStream, documentIndex);
|
||||
outputStream.write('\n');
|
||||
ourMapperNonPrettyPrint.writeValue(outputStream, observationDocument);
|
||||
outputStream.write('\n');
|
||||
outputStream.flush();
|
||||
outputStream.close();
|
||||
} catch (IOException theE) {
|
||||
theE.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
ourLog.info("Upload complete");
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,153 +0,0 @@
package ca.uhn.fhir.jpa.search.lastn.cli;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.parser.LenientErrorHandler;
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.IdJson;
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.IndexJson;
|
||||
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
|
||||
import com.fasterxml.jackson.annotation.JsonInclude;
|
||||
import com.fasterxml.jackson.core.JsonGenerator;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.databind.SerializationFeature;
|
||||
import com.google.common.base.Charsets;
|
||||
import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.hl7.fhir.r4.model.Bundle.BundleEntryComponent;
|
||||
import org.springframework.core.io.Resource;
|
||||
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.Arrays;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.atomic.AtomicInteger;
|
||||
|
||||
public class UploadSampleDatasetIntoElasticSearch {
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(UploadSampleDatasetIntoElasticSearch.class);
|
||||
|
||||
private static final ObjectMapper ourMapperNonPrettyPrint;
|
||||
|
||||
static {
|
||||
ourMapperNonPrettyPrint = new ObjectMapper();
|
||||
ourMapperNonPrettyPrint.setSerializationInclusion(JsonInclude.Include.NON_NULL);
|
||||
ourMapperNonPrettyPrint.disable(SerializationFeature.INDENT_OUTPUT);
|
||||
ourMapperNonPrettyPrint.disable(JsonGenerator.Feature.AUTO_CLOSE_TARGET);
|
||||
}
|
||||
|
||||
public static void main(String[] theArgs) {
|
||||
|
||||
FhirContext myFhirCtx = FhirContext.forR4();
|
||||
myFhirCtx.getRestfulClientFactory().setSocketTimeout(120000);
|
||||
|
||||
PathMatchingResourcePatternResolver provider = new PathMatchingResourcePatternResolver();
|
||||
final Resource[] bundleResources;
|
||||
try {
|
||||
bundleResources = provider.getResources("*.json.bz2");
|
||||
} catch (IOException e) {
|
||||
throw new RuntimeException("Unexpected error during transmission: " + e.toString(), e);
|
||||
}
|
||||
|
||||
AtomicInteger index = new AtomicInteger();
|
||||
|
||||
Arrays.stream(bundleResources).forEach(
|
||||
resource -> {
|
||||
index.incrementAndGet();
|
||||
|
||||
InputStream resIs = null;
|
||||
String nextBundleString;
|
||||
try {
|
||||
resIs = resource.getInputStream();
|
||||
resIs = new BZip2CompressorInputStream(resIs);
|
||||
nextBundleString = IOUtils.toString(resIs, Charsets.UTF_8);
|
||||
} catch (IOException e) {
|
||||
ourLog.error("Failure reading: {}", resource.getFilename(), e);
|
||||
return;
|
||||
} finally {
|
||||
try {
|
||||
if (resIs != null) {
|
||||
resIs.close();
|
||||
}
|
||||
} catch (final IOException ioe) {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
ourLog.info("Uploading {}/{} - {} ({} bytes)", index, bundleResources.length, resource.getFilename(), nextBundleString.length());
|
||||
|
||||
/*
|
||||
* SMART demo apps rely on the use of LOINC 3141-9 (Body Weight Measured)
|
||||
* instead of LOINC 29463-7 (Body Weight)
|
||||
*/
|
||||
nextBundleString = nextBundleString.replace("\"29463-7\"", "\"3141-9\"");
|
||||
|
||||
IParser parser = myFhirCtx.newJsonParser();
|
||||
parser.setParserErrorHandler(new LenientErrorHandler(false));
|
||||
Bundle bundle = parser.parseResource(Bundle.class, nextBundleString);
|
||||
|
||||
for (BundleEntryComponent nextEntry : bundle.getEntry()) {
|
||||
|
||||
/*
|
||||
* Synthea gives resources UUIDs with urn:uuid: prefix, which is only
|
||||
* used for placeholders. We're going to use these as the actual resource
|
||||
* IDs, so we strip the prefix.
|
||||
*/
|
||||
String nextResourceId = nextEntry.getFullUrl();
|
||||
if (nextResourceId == null) {
|
||||
nextResourceId = UUID.randomUUID().toString();
|
||||
}
|
||||
|
||||
nextResourceId = nextResourceId.replace("urn:uuid:", "");
|
||||
nextEntry.getResource().setId(nextResourceId);
|
||||
nextEntry.setFullUrl(nextResourceId);
|
||||
|
||||
if (nextEntry.getResource().getResourceType().equals(ResourceType.Observation)) {
|
||||
|
||||
IdJson id = new IdJson(nextResourceId);
|
||||
IndexJson documentIndex = new IndexJson(id);
|
||||
|
||||
org.hl7.fhir.r4.model.Observation observation = (Observation) nextEntry.getResource();
|
||||
ObservationJson observationDocument = new ObservationJson();
|
||||
observationDocument.setIdentifier(nextResourceId);
|
||||
String subject = "Patient/"+observation.getSubject().getReference();
|
||||
observationDocument.setSubject(subject);
|
||||
List<CodeableConcept> category = observation.getCategory();
|
||||
observationDocument.setCategories(category);
|
||||
observationDocument.setCode_concept_id(category.get(0).getCodingFirstRep().getSystem() + "/" + category.get(0).getCodingFirstRep().getCode());
|
||||
CodeableConcept code = observation.getCode();
|
||||
observationDocument.setCode(code);
|
||||
Date effectiveDtm = observation.getEffectiveDateTimeType().getValue();
|
||||
observationDocument.setEffectiveDtm(effectiveDtm);
|
||||
|
||||
StringWriter stringWriter = new StringWriter();
|
||||
File outputFile = new File("Observations.json");
|
||||
try {
|
||||
FileOutputStream outputStream = new FileOutputStream(outputFile, true);
|
||||
ourMapperNonPrettyPrint.writeValue(stringWriter, documentIndex);
|
||||
stringWriter.append('\n');
|
||||
ourMapperNonPrettyPrint.writeValue(stringWriter, observationDocument);
|
||||
ourMapperNonPrettyPrint.writeValue(outputStream, documentIndex);
|
||||
outputStream.write('\n');
|
||||
ourMapperNonPrettyPrint.writeValue(outputStream, observationDocument);
|
||||
outputStream.write('\n');
|
||||
outputStream.flush();
|
||||
outputStream.close();
|
||||
} catch (IOException theE) {
|
||||
theE.printStackTrace();
|
||||
}
|
||||
System.out.println(stringWriter.toString());
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
);
|
||||
|
||||
ourLog.info("Upload complete");
|
||||
|
||||
}
|
||||
|
||||
}
|
|
@@ -22,6 +22,10 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {

	private static final Logger ourLog = LoggerFactory.getLogger(TestR4ConfigWithElasticSearch.class);
	private static final String ELASTIC_VERSION = "6.5.4";
	protected final String elasticsearchHost = "localhost";
	protected final String elasticsearchUserId = "";
	protected final String elasticsearchPassword = "";

	@Override
	@Bean

@@ -38,9 +42,9 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
			.setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy.CREATE)
			.setIndexManagementWaitTimeoutMillis(10000)
			.setRequiredIndexStatus(ElasticsearchIndexStatus.YELLOW)
			.setRestUrl("http://localhost:" + httpPort)
			.setUsername("")
			.setPassword("")
			.setRestUrl("http://"+ elasticsearchHost + ":" + httpPort)
			.setUsername(elasticsearchUserId)
			.setPassword(elasticsearchPassword)
			.apply(retVal);

		return retVal;

@@ -65,7 +69,6 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
		return embeddedElastic;
	}

	@PreDestroy
	public void stop() {
		embeddedElasticSearch().stop();
@@ -0,0 +1,16 @@
package ca.uhn.fhir.jpa.config;

import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElasticSearch {

	@Bean()
	public ElasticsearchSvcImpl myElasticsearchSvc() {
		int elasticsearchPort = embeddedElasticSearch().getHttpPort();
		return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
	}

}
@@ -2,20 +2,16 @@ package ca.uhn.fhir.jpa.dao.lastn;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
import ca.uhn.fhir.jpa.dao.lastn.config.TestIntegratedObservationIndexSearchConfig;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticsearchClient;
import ca.uhn.fhir.jpa.dao.data.IObservationIndexedCodeCodeableConceptSearchParamDao;
import ca.uhn.fhir.jpa.dao.data.IObservationIndexedSearchParamLastNDao;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.parser.IParser;
import ca.uhn.fhir.parser.LenientErrorHandler;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import com.google.common.base.Charsets;
import org.apache.commons.io.IOUtils;
import org.shadehapi.elasticsearch.action.search.SearchRequest;
import org.shadehapi.elasticsearch.action.search.SearchResponse;
import org.hl7.fhir.r4.model.*;
import org.junit.After;
import org.junit.Before;

@@ -34,7 +30,7 @@ import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { TestIntegratedObservationIndexSearchConfig.class })
@ContextConfiguration(classes = { TestR4ConfigWithElasticsearchClient.class })
public class IntegratedObservationIndexedSearchParamLastNTest {

	@Autowired
@@ -128,29 +124,16 @@ public class IntegratedObservationIndexedSearchParamLastNTest {

		SearchParameterMap searchParameterMap = new SearchParameterMap();
		ReferenceParam subjectParam = new ReferenceParam("Patient", "", SUBJECTID);
		searchParameterMap.add("subject", subjectParam);
		searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
		TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
		searchParameterMap.add("category", categoryParam);
		searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
		TokenParam codeParam = new TokenParam(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE);
		searchParameterMap.add("code", codeParam);
		searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));

		// execute Observation ID search - Terms Aggregation
		SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationAllFieldsTermsSearchRequest(1000, searchParameterMap, 3);
		SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
		List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationTermsResults(responseIds);
		List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, 3);

		assertEquals(1, observationIdsOnly.size());
		ObservationJson observationIdOnly = observationIdsOnly.get(0);
		assertEquals(RESOURCEPID, observationIdOnly.getIdentifier());

		// execute Observation ID search - Composite Aggregation
		searchRequestIdsOnly = elasticsearchSvc.buildObservationAllFieldsCompositeSearchRequest(1000, searchParameterMap, 3);
		responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
		observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);

		assertEquals(1, observationIdsOnly.size());
		observationIdOnly = observationIdsOnly.get(0);
		assertEquals(RESOURCEPID, observationIdOnly.getIdentifier());
		assertEquals(RESOURCEPID, observationIdsOnly.get(0));

	}
@@ -278,19 +261,13 @@ public class IntegratedObservationIndexedSearchParamLastNTest {
		SearchParameterMap searchParameterMap = new SearchParameterMap();

		// execute Observation ID search - Composite Aggregation
		SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationAllFieldsCompositeSearchRequest(1000, searchParameterMap, 1);
		SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
		List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);
		List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap,1);

		assertEquals(20, observationIdsOnly.size());
		ObservationJson observationIdOnly = observationIdsOnly.get(0);

		searchRequestIdsOnly = elasticsearchSvc.buildObservationAllFieldsCompositeSearchRequest(1000, searchParameterMap, 3);
		responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
		observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);
		observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, 3);

		assertEquals(38, observationIdsOnly.size());
		observationIdOnly = observationIdsOnly.get(0);

	}
@@ -1,6 +1,6 @@
package ca.uhn.fhir.jpa.dao.lastn;

import ca.uhn.fhir.jpa.dao.lastn.config.TestObservationIndexSearchConfig;
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticSearch;
import ca.uhn.fhir.jpa.dao.data.IObservationIndexedCodeCodeableConceptSearchParamDao;
import ca.uhn.fhir.jpa.dao.lastn.entity.ObservationIndexedCodeCodeableConceptEntity;
import ca.uhn.fhir.jpa.dao.lastn.entity.ObservationIndexedSearchParamLastNEntity;

@@ -19,7 +19,7 @@ import java.util.*;
import static org.junit.Assert.assertEquals;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { TestObservationIndexSearchConfig.class })
@ContextConfiguration(classes = { TestR4ConfigWithElasticSearch.class })
public class PersistObservationIndexedSearchParamLastNTest {

	@Autowired
@@ -1,23 +0,0 @@
package ca.uhn.fhir.jpa.dao.lastn.config;

import ca.uhn.fhir.jpa.search.lastn.ElasticsearchSvcImpl;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;

import java.io.IOException;

@Configuration
@EnableJpaRepositories(entityManagerFactoryRef = "entityManagerFactory",
	basePackages = {"ca.uhn.fhir.jpa.dao.data"})
@EnableTransactionManagement
public class TestIntegratedObservationIndexSearchConfig extends TestObservationIndexSearchConfig {

	@Bean()
	public ElasticsearchSvcImpl myElasticsearchSvc() throws IOException {
		int elasticsearchPort = embeddedElasticSearch().getHttpPort();
		return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
	}

}
@@ -1,93 +0,0 @@
package ca.uhn.fhir.jpa.dao.lastn.config;
|
||||
|
||||
import ca.uhn.fhir.context.ConfigurationException;
|
||||
import ca.uhn.fhir.jpa.config.TestR4Config;
|
||||
import ca.uhn.fhir.jpa.config.r4.BaseR4Config;
|
||||
import ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory;
|
||||
import ca.uhn.fhir.jpa.search.elastic.ElasticsearchHibernatePropertiesBuilder;
|
||||
import org.hibernate.cfg.AvailableSettings;
|
||||
import org.hibernate.search.elasticsearch.cfg.ElasticsearchIndexStatus;
|
||||
import org.hibernate.search.elasticsearch.cfg.IndexSchemaManagementStrategy;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
|
||||
import org.springframework.transaction.annotation.EnableTransactionManagement;
|
||||
import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic;
|
||||
import pl.allegro.tech.embeddedelasticsearch.PopularProperties;
|
||||
|
||||
import javax.annotation.PreDestroy;
|
||||
import java.io.IOException;
|
||||
import java.util.Properties;
|
||||
import java.util.UUID;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
@Configuration
|
||||
@EnableJpaRepositories(entityManagerFactoryRef = "entityManagerFactory",
|
||||
basePackages = {"ca.uhn.fhir.jpa.dao.data"})
|
||||
@EnableTransactionManagement
|
||||
public class TestObservationIndexSearchConfig extends TestR4Config {
|
||||
|
||||
final String elasticsearchHost = "127.0.0.1";
|
||||
final String elasticsearchUserId = "";
|
||||
final String elasticsearchPassword = "";
|
||||
|
||||
private static final String ELASTIC_VERSION = "6.5.4";
|
||||
|
||||
@Override
|
||||
public Properties jpaProperties() {
|
||||
|
||||
Properties extraProperties = new Properties();
|
||||
extraProperties.put("hibernate.dialect", org.hibernate.dialect.H2Dialect.class);
|
||||
extraProperties.put("hibernate.format_sql", "false");
|
||||
extraProperties.put("hibernate.show_sql", "false");
|
||||
extraProperties.put(AvailableSettings.HBM2DDL_AUTO, "update");
|
||||
extraProperties.put("hibernate.jdbc.batch_size", "5000");
|
||||
extraProperties.put("hibernate.cache.use_query_cache", "false");
|
||||
extraProperties.put("hibernate.cache.use_second_level_cache", "false");
|
||||
extraProperties.put("hibernate.cache.use_structured_entries", "false");
|
||||
extraProperties.put("hibernate.cache.use_minimal_puts", "false");
|
||||
extraProperties.put("hibernate.search.model_mapping", LuceneSearchMappingFactory.class.getName());
|
||||
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");
|
||||
extraProperties.put("hibernate.search.default.worker.execution", "sync");
|
||||
|
||||
int elasticsearchPort = embeddedElasticSearch().getHttpPort();
|
||||
new ElasticsearchHibernatePropertiesBuilder()
|
||||
.setDebugRefreshAfterWrite(true)
|
||||
.setDebugPrettyPrintJsonLog(true)
|
||||
.setIndexSchemaManagementStrategy(IndexSchemaManagementStrategy.CREATE)
|
||||
.setIndexManagementWaitTimeoutMillis(10000)
|
||||
.setRequiredIndexStatus(ElasticsearchIndexStatus.YELLOW)
|
||||
.setRestUrl("http://" + elasticsearchHost + ":" + elasticsearchPort)
|
||||
.setUsername(elasticsearchUserId)
|
||||
.setPassword(elasticsearchPassword)
|
||||
.apply(extraProperties);
|
||||
|
||||
extraProperties.setProperty("hibernate.search.default.elasticsearch.refresh_after_write", "true");
|
||||
return extraProperties;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public EmbeddedElastic embeddedElasticSearch() {
|
||||
EmbeddedElastic embeddedElastic;
|
||||
try {
|
||||
embeddedElastic = EmbeddedElastic.builder()
|
||||
.withElasticVersion(ELASTIC_VERSION)
|
||||
.withSetting(PopularProperties.TRANSPORT_TCP_PORT, 0)
|
||||
.withSetting(PopularProperties.HTTP_PORT, 0)
|
||||
.withSetting(PopularProperties.CLUSTER_NAME, UUID.randomUUID())
|
||||
.withStartTimeout(60, TimeUnit.SECONDS)
|
||||
.build()
|
||||
.start();
|
||||
} catch (IOException | InterruptedException e) {
|
||||
throw new ConfigurationException(e);
|
||||
}
|
||||
|
||||
return embeddedElastic;
|
||||
}
|
||||
|
||||
@PreDestroy
|
||||
public void stop() {
|
||||
embeddedElasticSearch().stop();
|
||||
}
|
||||
|
||||
}
|
|
@@ -1,134 +0,0 @@
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.*;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.lastn.config.TestIntegratedObservationIndexSearchConfig;
|
||||
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.util.*;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration(classes = { TestIntegratedObservationIndexSearchConfig.class })
|
||||
public class FhirResourceDaoR4LastNTest extends BaseJpaTest {
|
||||
|
||||
private static final org.slf4j.Logger ourLog = org.slf4j.LoggerFactory.getLogger(FhirResourceDaoR4LastNTest.class);
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myPatientDaoR4")
|
||||
protected IFhirResourceDaoPatient<Patient> myPatientDao;
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myObservationDaoR4")
|
||||
protected IFhirResourceDaoObservation<Observation> myObservationDao;
|
||||
|
||||
@Autowired
|
||||
protected DaoConfig myDaoConfig;
|
||||
|
||||
@Autowired
|
||||
protected FhirContext myFhirCtx;
|
||||
|
||||
@Autowired
|
||||
protected PlatformTransactionManager myPlatformTransactionManager;
|
||||
|
||||
@Override
|
||||
protected FhirContext getContext() {
|
||||
return myFhirCtx;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PlatformTransactionManager getTxManager() {
|
||||
return myPlatformTransactionManager;
|
||||
}
|
||||
|
||||
@Before
|
||||
public void beforeDisableResultReuse() {
|
||||
myDaoConfig.setReuseCachedSearchResultsForMillis(null);
|
||||
}
|
||||
|
||||
private ServletRequestDetails mockSrd() {
|
||||
return mySrd;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLastN() {
|
||||
Patient pt = new Patient();
|
||||
pt.addName().setFamily("Lastn").addGiven("Arthur");
|
||||
IIdType ptId = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
|
||||
|
||||
Map<String, String[]> requestParameters = new HashMap<>();
|
||||
String[] maxParam = new String[1];
|
||||
maxParam[0] = "1";
|
||||
requestParameters.put("max", maxParam);
|
||||
when(mySrd.getParameters()).thenReturn(requestParameters);
|
||||
|
||||
Map<IIdType, CodeableConcept> observationIds = new HashMap<>();
|
||||
for(int observationIdx = 0 ; observationIdx < 20 ; observationIdx++) {
|
||||
Calendar observationDate = new GregorianCalendar();
|
||||
String idxSuffix = String.valueOf(observationIdx);
|
||||
|
||||
Observation obs = new Observation();
|
||||
obs.getText().setDivAsString("<div>OBSTEXT0_" + idxSuffix + "</div>");
|
||||
obs.getSubject().setReferenceElement(ptId);
|
||||
obs.getCode().addCoding().setCode("CODE_" + idxSuffix).setSystem("http://mycode.com");
|
||||
obs.setValue(new StringType("obsvalue0_" + idxSuffix));
|
||||
observationDate.add(Calendar.HOUR, -2);
|
||||
Date effectiveDtm = observationDate.getTime();
|
||||
obs.setEffective(new DateTimeType(effectiveDtm));
|
||||
observationIds.put(myObservationDao.create(obs, mockSrd()).getId().toUnqualifiedVersionless(), obs.getCode());
|
||||
|
||||
obs = new Observation();
|
||||
obs.getText().setDivAsString("<div>OBSTEXT1_" + idxSuffix + "</div>");
|
||||
obs.getSubject().setReferenceElement(ptId);
|
||||
obs.getCode().addCoding().setCode("CODE_" + idxSuffix).setSystem("http://mycode.com");
|
||||
obs.setValue(new StringType("obsvalue1_" + idxSuffix));
|
||||
observationDate.add(Calendar.HOUR, -1);
|
||||
effectiveDtm = observationDate.getTime();
|
||||
obs.setEffective(new DateTimeType(effectiveDtm));
|
||||
observationIds.put(myObservationDao.create(obs, mockSrd()).getId().toUnqualifiedVersionless(), obs.getCode());
|
||||
|
||||
obs = new Observation();
|
||||
obs.getText().setDivAsString("<div>OBSTEXT2_" + idxSuffix + "</div>");
|
||||
obs.getSubject().setReferenceElement(ptId);
|
||||
obs.getCode().addCoding().setCode("CODE_" + idxSuffix).setSystem("http://mycode.com");
|
||||
obs.setValue(new StringType("obsvalue2_" + idxSuffix));
|
||||
observationDate.add(Calendar.HOUR, -0);
|
||||
effectiveDtm = observationDate.getTime();
|
||||
obs.setEffective(new DateTimeType(effectiveDtm));
|
||||
observationIds.put(myObservationDao.create(obs, mockSrd()).getId().toUnqualifiedVersionless(), obs.getCode());
|
||||
}
|
||||
|
||||
HttpServletRequest request;
|
||||
List<String> actual;
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
params.setLastN(true);
|
||||
actual = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(),null));
|
||||
|
||||
assertEquals(20, actual.size());
|
||||
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void afterClassClearContext() {
|
||||
TestUtil.clearAllStaticFieldsForUnitTest();
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,472 @@
package ca.uhn.fhir.jpa.dao.r4;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.dao.*;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.config.TestR4ConfigWithElasticsearchClient;
|
||||
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.param.*;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.util.TestUtil;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.*;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.Test;
|
||||
import org.junit.runner.RunWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import static org.junit.Assert.*;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
@RunWith(SpringJUnit4ClassRunner.class)
|
||||
@ContextConfiguration(classes = { TestR4ConfigWithElasticsearchClient.class })
|
||||
public class FhirResourceDaoR4SearchLastNTest extends BaseJpaTest {
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myPatientDaoR4")
|
||||
protected IFhirResourceDaoPatient<Patient> myPatientDao;
|
||||
|
||||
@Autowired
|
||||
@Qualifier("myObservationDaoR4")
|
||||
protected IFhirResourceDaoObservation<Observation> myObservationDao;
|
||||
|
||||
@Autowired
|
||||
protected DaoConfig myDaoConfig;
|
||||
|
||||
@Autowired
|
||||
protected FhirContext myFhirCtx;
|
||||
|
||||
@Autowired
|
||||
protected PlatformTransactionManager myPlatformTransactionManager;
|
||||
|
||||
@Override
|
||||
protected FhirContext getContext() {
|
||||
return myFhirCtx;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected PlatformTransactionManager getTxManager() {
|
||||
return myPlatformTransactionManager;
|
||||
}
|
||||
|
||||
private final String observationCd0 = "code0";
|
||||
private final String observationCd1 = "code1";
|
||||
private final String observationCd2 = "code2";
|
||||
|
||||
private final String categoryCd0 = "category0";
|
||||
private final String categoryCd1 = "category1";
|
||||
private final String categoryCd2 = "category2";
|
||||
|
||||
private final String codeSystem = "http://mycode.com";
|
||||
private final String categorySystem = "http://mycategory.com";
|
||||
|
||||
// Using static variables including the flag below so that we can initialize the database and indexes once
// (all of the tests only read from the DB and indexes, so there is no need to re-initialize them for each test).
|
||||
private static boolean dataLoaded = false;
|
||||
|
||||
private static IIdType patient0Id = null;
|
||||
private static IIdType patient1Id = null;
|
||||
private static IIdType patient2Id = null;
|
||||
|
||||
private static final Map<String, String> observationPatientMap = new HashMap<>();
|
||||
private static final Map<String, String> observationCategoryMap = new HashMap<>();
|
||||
private static final Map<String, String> observationCodeMap = new HashMap<>();
|
||||
private static final Map<String, Date> observationEffectiveMap = new HashMap<>();
|
||||
|
||||
@Before
|
||||
public void beforeCreateTestPatientsAndObservations() {
|
||||
// Using a static flag here to ensure that the load is only done once. The reason for this is that we cannot
// access Autowired objects in a @BeforeClass method.
|
||||
if(!dataLoaded) {
|
||||
Patient pt = new Patient();
|
||||
pt.addName().setFamily("Lastn").addGiven("Arthur");
|
||||
patient0Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
|
||||
createObservationsForPatient(patient0Id);
|
||||
pt = new Patient();
|
||||
pt.addName().setFamily("Lastn").addGiven("Johnathan");
|
||||
patient1Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
|
||||
createObservationsForPatient(patient1Id);
|
||||
pt = new Patient();
|
||||
pt.addName().setFamily("Lastn").addGiven("Michael");
|
||||
patient2Id = myPatientDao.create(pt, mockSrd()).getId().toUnqualifiedVersionless();
|
||||
createObservationsForPatient(patient2Id);
|
||||
dataLoaded = true;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void createObservationsForPatient(IIdType thePatientId) {
|
||||
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd0, categoryCd0, 15);
|
||||
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd0, categoryCd1, 10);
|
||||
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd0, categoryCd2, 5);
|
||||
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd1, categoryCd0, 10);
|
||||
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd1, categoryCd1, 5);
|
||||
createFiveObservationsForPatientCodeCategory(thePatientId,observationCd2, categoryCd2, 5);
|
||||
}
|
||||
|
||||
private void createFiveObservationsForPatientCodeCategory(IIdType thePatientId, String theObservationCode, String theCategoryCode,
|
||||
Integer theTimeOffset) {
|
||||
Calendar observationDate = new GregorianCalendar();
|
||||
|
||||
for (int idx=0; idx<5; idx++ ) {
|
||||
Observation obs = new Observation();
|
||||
obs.getSubject().setReferenceElement(thePatientId);
|
||||
obs.getCode().addCoding().setCode(theObservationCode).setSystem(codeSystem);
|
||||
obs.setValue(new StringType(theObservationCode + "_0"));
|
||||
observationDate.add(Calendar.HOUR, -theTimeOffset+idx);
|
||||
Date effectiveDtm = observationDate.getTime();
|
||||
obs.setEffective(new DateTimeType(effectiveDtm));
|
||||
obs.getCategoryFirstRep().addCoding().setCode(theCategoryCode).setSystem(categorySystem);
|
||||
String observationId = myObservationDao.create(obs, mockSrd()).getId().toUnqualifiedVersionless().getValue();
|
||||
observationPatientMap.put(observationId, thePatientId.getValue());
|
||||
observationCategoryMap.put(observationId, theCategoryCode);
|
||||
observationCodeMap.put(observationId, theObservationCode);
|
||||
observationEffectiveMap.put(observationId, effectiveDtm);
|
||||
}
|
||||
}
|
||||
|
||||
private ServletRequestDetails mockSrd() {
|
||||
return mySrd;
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testLastNNoParams() {
|
||||
|
||||
SearchParameterMap params = new SearchParameterMap();
|
||||
|
||||
List<String> sortedPatients = new ArrayList<>();
|
||||
sortedPatients.add(patient0Id.getValue());
|
||||
sortedPatients.add(patient1Id.getValue());
|
||||
sortedPatients.add(patient2Id.getValue());
|
||||
|
||||
List<String> sortedObservationCodes = new ArrayList<>();
|
||||
sortedObservationCodes.add(observationCd0);
|
||||
sortedObservationCodes.add(observationCd1);
|
||||
sortedObservationCodes.add(observationCd2);
|
||||
|
||||
executeTestCase(params, sortedPatients, sortedObservationCodes, null,90);
|
||||
}
|
||||
|
||||
private void executeTestCase(SearchParameterMap params, List<String> sortedPatients, List<String> sortedObservationCodes, List<String> theCategories, int expectedObservationCount) {
|
||||
List<String> actual;
|
||||
params.setLastN(true);
|
||||
|
||||
Map<String, String[]> requestParameters = new HashMap<>();
|
||||
String[] maxParam = new String[1];
|
||||
maxParam[0] = "100";
|
||||
requestParameters.put("max", maxParam);
|
||||
when(mySrd.getParameters()).thenReturn(requestParameters);
|
||||
|
||||
actual = toUnqualifiedVersionlessIdValues(myObservationDao.observationsLastN(params, mockSrd(),null));
|
||||
|
||||
assertEquals(expectedObservationCount, actual.size());
|
||||
|
||||
validateSorting(actual, sortedPatients, sortedObservationCodes, theCategories);
|
||||
}
|
||||
|
||||
private void validateSorting(List<String> theObservationIds, List<String> thePatientIds, List<String> theCodes, List<String> theCategories) {
|
||||
int theNextObservationIdx = 0;
|
||||
// Validate patient grouping
|
||||
for (String patientId : thePatientIds) {
|
||||
assertEquals(patientId, observationPatientMap.get(theObservationIds.get(theNextObservationIdx)));
|
||||
theNextObservationIdx = validateSortingWithinPatient(theObservationIds, theNextObservationIdx, theCodes, theCategories, patientId);
|
||||
}
|
||||
assertEquals(theObservationIds.size(), theNextObservationIdx);
|
||||
}
|
||||
|
||||
private int validateSortingWithinPatient(List<String> theObservationIds, int theFirstObservationIdxForPatient, List<String> theCodes,
|
||||
List<String> theCategories, String thePatientId) {
|
||||
int theNextObservationIdx = theFirstObservationIdxForPatient;
|
||||
for (String codeValue : theCodes) {
|
||||
assertEquals(codeValue, observationCodeMap.get(theObservationIds.get(theNextObservationIdx)));
|
||||
// Validate sorting within code group
|
||||
theNextObservationIdx = validateSortingWithinCode(theObservationIds,theNextObservationIdx,
|
||||
observationCodeMap.get(theObservationIds.get(theNextObservationIdx)), theCategories, thePatientId);
|
||||
}
|
||||
return theNextObservationIdx;
|
||||
}
|
||||
|
||||
private int validateSortingWithinCode(List<String> theObservationIds, int theFirstObservationIdxForPatientAndCode, String theObservationCode,
|
||||
List<String> theCategories, String thePatientId) {
|
||||
int theNextObservationIdx = theFirstObservationIdxForPatientAndCode;
|
||||
Date lastEffectiveDt = observationEffectiveMap.get(theObservationIds.get(theNextObservationIdx));
|
||||
theNextObservationIdx++;
|
||||
while(theObservationCode.equals(observationCodeMap.get(theObservationIds.get(theNextObservationIdx)))
|
||||
&& thePatientId.equals(observationPatientMap.get(theObservationIds.get(theNextObservationIdx)))) {
|
||||
// Check that effective date is before that of the previous observation.
|
||||
assertTrue(lastEffectiveDt.compareTo(observationEffectiveMap.get(theObservationIds.get(theNextObservationIdx))) > 0);
|
||||
lastEffectiveDt = observationEffectiveMap.get(theObservationIds.get(theNextObservationIdx));
|
||||
|
||||
// Check that observation is in one of the specified categories (if applicable)
if (theCategories != null && !theCategories.isEmpty()) {
assertTrue(theCategories.contains(observationCategoryMap.get(theObservationIds.get(theNextObservationIdx))));
}
theNextObservationIdx++;
if (theNextObservationIdx >= theObservationIds.size()) {
// Have reached the end of the Observation list.
break;
}
}
return theNextObservationIdx;
}

@Test
public void testLastNSinglePatient() {

SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", patient0Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);
sortedObservationCodes.add(observationCd2);

executeTestCase(params, sortedPatients,sortedObservationCodes, null,30);

params = new SearchParameterMap();
ReferenceParam patientParam = new ReferenceParam("Patient", "", patient0Id.getValue());
params.add(Observation.SP_PATIENT, buildReferenceAndListParam(patientParam));

sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());

sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);
sortedObservationCodes.add(observationCd2);

executeTestCase(params, sortedPatients,sortedObservationCodes, null,30);
}

private ReferenceAndListParam buildReferenceAndListParam(ReferenceParam... theReference) {
ReferenceOrListParam myReferenceOrListParam = new ReferenceOrListParam();
for (ReferenceParam referenceParam : theReference) {
myReferenceOrListParam.addOr(referenceParam);
}
return new ReferenceAndListParam().addAnd(myReferenceOrListParam);
}

@Test
public void testLastNMultiplePatients() {

// Two Subject parameters.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2));

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);
sortedObservationCodes.add(observationCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, null,60);

// Two Patient parameters
params = new SearchParameterMap();
ReferenceParam patientParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam patientParam3 = new ReferenceParam("Patient", "", patient2Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(patientParam1, patientParam3));

sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient2Id.getValue());

executeTestCase(params,sortedPatients, sortedObservationCodes, null,60);

}

@Test
public void testLastNSingleCategory() {

// One category parameter.
SearchParameterMap params = new SearchParameterMap();
TokenParam categoryParam = new TokenParam(categorySystem, categoryCd0);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd0);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 30);

// Another category parameter.
params = new SearchParameterMap();
categoryParam = new TokenParam(categorySystem, categoryCd2);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
myCategories = new ArrayList<>();
myCategories.add(categoryCd2);

sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 30);

}

@Test
public void testLastNMultipleCategories() {

// Two category parameters.
SearchParameterMap params = new SearchParameterMap();
TokenParam categoryParam1 = new TokenParam(categorySystem, categoryCd0);
TokenParam categoryParam2 = new TokenParam(categorySystem, categoryCd1);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd0);
myCategories.add(categoryCd1);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 60);
}

@Test
public void testLastNSingleCode() {

// One code parameter.
SearchParameterMap params = new SearchParameterMap();
TokenParam code = new TokenParam(codeSystem, observationCd0);
params.add(Observation.SP_CODE, buildTokenAndListParam(code));
List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

executeTestCase(params, sortedPatients, sortedObservationCodes, null, 45);

// Another code parameter.
params = new SearchParameterMap();
code = new TokenParam(codeSystem, observationCd2);
params.add(Observation.SP_CODE, buildTokenAndListParam(code));
sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, null, 15);

}

@Test
public void testLastNMultipleCodes() {

// Two code parameters.
SearchParameterMap params = new SearchParameterMap();
TokenParam codeParam1 = new TokenParam(codeSystem, observationCd0);
TokenParam codeParam2 = new TokenParam(codeSystem, observationCd1);
params.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));
List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd1);

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());
sortedPatients.add(patient2Id.getValue());

executeTestCase(params, sortedPatients, sortedObservationCodes, null, 75);

}

@Test
public void testLastNSinglePatientCategoryCode() {

// One patient, category and code.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", patient0Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
TokenParam code = new TokenParam(codeSystem, observationCd0);
params.add(Observation.SP_CODE, buildTokenAndListParam(code));
TokenParam category = new TokenParam(categorySystem, categoryCd2);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(category));

List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());

List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);

List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 5);

}

@Test
public void testLastNMultiplePatientsCategoriesCodes() {

// Two patients, categories and codes.
SearchParameterMap params = new SearchParameterMap();
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", patient0Id.getValue());
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", patient1Id.getValue());
params.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2));
List<String> sortedPatients = new ArrayList<>();
sortedPatients.add(patient0Id.getValue());
sortedPatients.add(patient1Id.getValue());

TokenParam codeParam1 = new TokenParam(codeSystem, observationCd0);
TokenParam codeParam2 = new TokenParam(codeSystem, observationCd2);
params.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));
List<String> sortedObservationCodes = new ArrayList<>();
sortedObservationCodes.add(observationCd0);
sortedObservationCodes.add(observationCd2);

TokenParam categoryParam1 = new TokenParam(categorySystem, categoryCd1);
TokenParam categoryParam2 = new TokenParam(categorySystem, categoryCd2);
params.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
List<String> myCategories = new ArrayList<>();
myCategories.add(categoryCd1);
myCategories.add(categoryCd2);

executeTestCase(params, sortedPatients, sortedObservationCodes, myCategories, 30);

}

private TokenAndListParam buildTokenAndListParam(TokenParam... theToken) {
TokenOrListParam myTokenOrListParam = new TokenOrListParam();
for (TokenParam tokenParam : theToken) {
myTokenOrListParam.addOr(tokenParam);
}
return new TokenAndListParam().addAnd(myTokenOrListParam);
}

@AfterClass
public static void afterClassClearContext() {
TestUtil.clearAllStaticFieldsForUnitTest();
}

}
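The buildReferenceAndListParam and buildTokenAndListParam helpers above wrap each group of values in an OR list and then in a single-entry AND list, which is how FHIR's AND-of-ORs search semantics are expressed with the HAPI param classes: every AND entry must be satisfied, and within one entry any OR value may match. Below is a minimal, self-contained sketch of that composition, separate from the diff itself; the class name, patient IDs, and code system/values are hypothetical placeholders, not values from this commit.

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.*;
import org.hl7.fhir.r4.model.Observation;

public class LastNParamSketch {
// Builds (subject = Patient/p1 OR Patient/p2) AND (code = code-a OR code-b).
public static SearchParameterMap subjectAndCodeParams() {
ReferenceOrListParam subjects = new ReferenceOrListParam()
.addOr(new ReferenceParam("Patient", "", "p1"))
.addOr(new ReferenceParam("Patient", "", "p2"));
TokenOrListParam codes = new TokenOrListParam()
.addOr(new TokenParam("http://example.org/codes", "code-a"))
.addOr(new TokenParam("http://example.org/codes", "code-b"));

SearchParameterMap params = new SearchParameterMap();
params.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(subjects));
params.add(Observation.SP_CODE, new TokenAndListParam().addAnd(codes));
return params;
}
}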
@@ -1,9 +1,7 @@
package ca.uhn.fhir.jpa.search.lastn;

import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.rest.param.TokenParamModifier;
import ca.uhn.fhir.rest.param.*;
import ca.uhn.fhir.jpa.search.lastn.config.TestElasticsearchConfig;
import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
@@ -11,9 +9,8 @@ import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.hl7.fhir.r4.model.Observation;
import org.junit.*;
import org.shadehapi.elasticsearch.action.search.SearchRequest;
import org.shadehapi.elasticsearch.action.search.SearchResponse;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.junit.runner.RunWith;
@@ -28,7 +25,7 @@ import static ca.uhn.fhir.jpa.search.lastn.IndexConstants.*;
import static org.junit.Assert.*;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { TestElasticsearchConfig.class } )
@ContextConfiguration(classes = {TestElasticsearchConfig.class})
public class LastNElasticsearchSvcMultipleObservationsTest {

@Autowired
@@ -36,7 +33,7 @@ public class LastNElasticsearchSvcMultipleObservationsTest {

private static ObjectMapper ourMapperNonPrettyPrint;

private Map<String,Map<String,List<Date>>> createdPatientObservationMap = new HashMap<>();
private final Map<String, Map<String, List<Date>>> createdPatientObservationMap = new HashMap<>();


@BeforeClass
@@ -59,27 +56,12 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
}

@Test
public void testLastNNoCriteriaQuery() throws IOException {

// execute Observation ID search (Terms Aggregation) last 3 observations for each patient
SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationTermsSearchRequest(1000, null, 3);
SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationTermsResults(responseIds);

validateQueryResponse(observationIdsOnly);
public void testLastNNoCriteriaQuery() {

// execute Observation ID search (Composite Aggregation) last 3 observations for each patient
searchRequestIdsOnly = elasticsearchSvc.buildObservationCompositeSearchRequest(1000, null, 3);
responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);
List<ObservationJson> observations = elasticsearchSvc.executeLastNWithAllFields(null, 3);

validateQueryResponse(observationIdsOnly);

// Retrieve all Observation codes
SearchRequest searchRequest = elasticsearchSvc.buildObservationCodesSearchRequest(1000);
SearchResponse response = elasticsearchSvc.executeSearchRequest(searchRequest);
List<CodeJson> codes = elasticsearchSvc.buildCodeResult(response);
assertEquals(2, codes.size());
validateQueryResponse(observations);

}

@@ -88,7 +70,7 @@ public class LastNElasticsearchSvcMultipleObservationsTest {

// Observation documents should be grouped by subject, then by observation code, and then sorted by effective date/time
// within each observation code. Verify the grouping by creating a nested Map.
Map<String,Map<String, List<Date>>> queriedPatientObservationMap = new HashMap<>();
Map<String, Map<String, List<Date>>> queriedPatientObservationMap = new HashMap<>();
ObservationJson previousObservationJson = null;
for (ObservationJson observationJson : observationIdsOnly) {
assertNotNull(observationJson.getIdentifier());
@@ -99,7 +81,7 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
ArrayList<Date> observationDates = new ArrayList<>();
observationDates.add(observationJson.getEffectiveDtm());
Map<String, List<Date>> codeObservationMap = new HashMap<>();
codeObservationMap.put(observationJson.getCode_concept_id(),observationDates);
codeObservationMap.put(observationJson.getCode_concept_id(), observationDates);
queriedPatientObservationMap.put(observationJson.getSubject(), codeObservationMap);
} else if (observationJson.getSubject().equals(previousObservationJson.getSubject())) {
if (observationJson.getCode_concept_id().equals(previousObservationJson.getCode_concept_id())) {
@@ -111,7 +93,7 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
assertFalse(codeObservationDateMap.containsKey(observationJson.getCode_concept_id()));
ArrayList<Date> observationDates = new ArrayList<>();
observationDates.add(observationJson.getEffectiveDtm());
codeObservationDateMap.put(observationJson.getCode_concept_id(),observationDates);
codeObservationDateMap.put(observationJson.getCode_concept_id(), observationDates);
}
} else {
// Ensure that subject/patient was not already retrieved out of order
@@ -119,20 +101,20 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
ArrayList<Date> observationDates = new ArrayList<>();
observationDates.add(observationJson.getEffectiveDtm());
Map<String, List<Date>> codeObservationMap = new HashMap<>();
codeObservationMap.put(observationJson.getCode_concept_id(),observationDates);
codeObservationMap.put(observationJson.getCode_concept_id(), observationDates);
queriedPatientObservationMap.put(observationJson.getSubject(), codeObservationMap);
}
previousObservationJson = observationJson;
}

// Finally check that only the most recent effective date/time values were returned and in the correct order.
for(String subjectId : queriedPatientObservationMap.keySet()) {
for (String subjectId : queriedPatientObservationMap.keySet()) {
Map<String, List<Date>> queriedObservationCodeMap = queriedPatientObservationMap.get(subjectId);
Map<String, List<Date>> createdObservationCodeMap = createdPatientObservationMap.get(subjectId);
for(String observationCode : queriedObservationCodeMap.keySet()) {
for (String observationCode : queriedObservationCodeMap.keySet()) {
List<Date> queriedObservationDates = queriedObservationCodeMap.get(observationCode);
List<Date> createdObservationDates = createdObservationCodeMap.get(observationCode);
for (int dateIdx=0; dateIdx<queriedObservationDates.size(); dateIdx++) {
for (int dateIdx = 0; dateIdx < queriedObservationDates.size(); dateIdx++) {
assertEquals(createdObservationDates.get(dateIdx), queriedObservationDates.get(dateIdx));
}
}
@@ -141,80 +123,149 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
}

@Test
public void testLastNMultiPatientMultiCodeHashMultiCategoryHash() throws IOException {
// Include subject and patient
public void testLastNMultiPatientMultiCodeHashMultiCategoryHash() {
// Multiple Subject references
SearchParameterMap searchParameterMap = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", "3");
searchParameterMap.add("subject", subjectParam);
ReferenceParam patientParam = new ReferenceParam("Patient", "", "8");
searchParameterMap.add("patient", patientParam);
TokenParam categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
searchParameterMap.add("category", categoryParam);
TokenParam codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
searchParameterMap.add("code", codeParam);
ReferenceParam subjectParam1 = new ReferenceParam("Patient", "", "3");
ReferenceParam subjectParam2 = new ReferenceParam("Patient", "", "5");
searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam1, subjectParam2));
TokenParam categoryParam1 = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
TokenParam categoryParam2 = new TokenParam("http://mycodes.org/fhir/observation-category", "test-vital-signs");
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
TokenParam codeParam1 = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
TokenParam codeParam2 = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-2");
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));

SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationCompositeSearchRequest(1000, searchParameterMap, 100);
SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);
List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);

assertEquals(10, observationIdsOnly.size());
assertEquals(20, observations.size());

// Repeat with multiple Patient parameter
searchParameterMap = new SearchParameterMap();
ReferenceParam patientParam1 = new ReferenceParam("Patient", "", "8");
ReferenceParam patientParam2 = new ReferenceParam("Patient", "", "6");
searchParameterMap.add(Observation.SP_PATIENT, buildReferenceAndListParam(patientParam1, patientParam2));
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam1, categoryParam2));
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam1, codeParam2));

observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);

assertEquals(20, observations.size());

}

private ReferenceAndListParam buildReferenceAndListParam(ReferenceParam... theReference) {
ReferenceOrListParam myReferenceOrListParam = new ReferenceOrListParam();
for (ReferenceParam referenceParam : theReference) {
myReferenceOrListParam.addOr(referenceParam);
}
return new ReferenceAndListParam().addAnd(myReferenceOrListParam);
}

private TokenAndListParam buildTokenAndListParam(TokenParam... theToken) {
TokenOrListParam myTokenOrListParam = new TokenOrListParam();
for (TokenParam tokenParam : theToken) {
myTokenOrListParam.addOr(tokenParam);
}
return new TokenAndListParam().addAnd(myTokenOrListParam);
}

@Test
public void testLastNCodeCodeOnlyCategoryCodeOnly() throws IOException {
// Include subject and patient
public void testLastNCodeCodeOnlyCategoryCodeOnly() {
// Include subject
SearchParameterMap searchParameterMap = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", "3");
searchParameterMap.add("subject", subjectParam);
TokenParam categoryParam = new TokenParam ("test-heart-rate");
searchParameterMap.add("category", categoryParam);
searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
TokenParam categoryParam = new TokenParam("test-heart-rate");
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
TokenParam codeParam = new TokenParam("test-code-1");
searchParameterMap.add("code", codeParam);
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));

SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationCompositeSearchRequest(1000, searchParameterMap, 100);
SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);
List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);

assertEquals(5, observationIdsOnly.size());
assertEquals(5, observations.size());

}

@Test
public void testLastNCodeSystemOnlyCategorySystemOnly() throws IOException {
public void testLastNCodeSystemOnlyCategorySystemOnly() {
// Include subject and patient
SearchParameterMap searchParameterMap = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", "3");
searchParameterMap.add("subject", subjectParam);
searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
TokenParam categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", null);
searchParameterMap.add("category", categoryParam);
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
TokenParam codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", null);
searchParameterMap.add("code", codeParam);
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));

SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationCompositeSearchRequest(1000, searchParameterMap, 100);
SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);
List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);

assertEquals(10, observationIdsOnly.size());
assertEquals(10, observations.size());
}

@Test
public void testLastNCodeCodeTextCategoryTextOnly() throws IOException {
public void testLastNCodeCodeTextCategoryTextOnly() {
SearchParameterMap searchParameterMap = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", "3");
searchParameterMap.add("subject", subjectParam);
searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
TokenParam categoryParam = new TokenParam("test-heart-rate display");
categoryParam.setModifier(TokenParamModifier.TEXT);
searchParameterMap.add("category", categoryParam);
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
TokenParam codeParam = new TokenParam("test-code-1 display");
codeParam.setModifier(TokenParamModifier.TEXT);
searchParameterMap.add("code", codeParam);
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));

SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationCompositeSearchRequest(1000, searchParameterMap, 100);
SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);
List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);

assertEquals(5, observationIdsOnly.size());
assertEquals(5, observations.size());

}

@Test
public void testLastNNoMatchQueries() {
// Invalid Patient
SearchParameterMap searchParameterMap = new SearchParameterMap();
ReferenceParam patientParam = new ReferenceParam("Patient", "", "10");
searchParameterMap.add(Observation.SP_PATIENT, buildReferenceAndListParam(patientParam));
TokenParam categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
TokenParam codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
List<String> observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);
assertEquals(0, observations.size());

// Invalid subject
searchParameterMap = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", "10");
searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);
assertEquals(0, observations.size());

// Invalid observation code
searchParameterMap = new SearchParameterMap();
subjectParam = new ReferenceParam("Patient", "", "9");
searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-heart-rate");
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-999");
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);
assertEquals(0, observations.size());

// Invalid category code
searchParameterMap = new SearchParameterMap();
subjectParam = new ReferenceParam("Patient", "", "9");
searchParameterMap.add(Observation.SP_SUBJECT, buildReferenceAndListParam(subjectParam));
categoryParam = new TokenParam("http://mycodes.org/fhir/observation-category", "test-not-a-category");
searchParameterMap.add(Observation.SP_CATEGORY, buildTokenAndListParam(categoryParam));
codeParam = new TokenParam("http://mycodes.org/fhir/observation-code", "test-code-1");
searchParameterMap.add(Observation.SP_CODE, buildTokenAndListParam(codeParam));
observations = elasticsearchSvc.executeLastN(searchParameterMap, 100);
assertEquals(0, observations.size());

}

@@ -258,18 +309,18 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
categoryCodeableConcept2.setCoding(category2);
categoryConcepts2.add(categoryCodeableConcept2);

for (int patientCount = 0; patientCount < 10 ; patientCount++) {
for (int patientCount = 0; patientCount < 10; patientCount++) {

String subject = String.valueOf(patientCount);

for ( int entryCount = 0; entryCount < 10 ; entryCount++ ) {
for (int entryCount = 0; entryCount < 10; entryCount++) {

ObservationJson observationJson = new ObservationJson();
String identifier = String.valueOf((entryCount + patientCount*10));
String identifier = String.valueOf((entryCount + patientCount * 10));
observationJson.setIdentifier(identifier);
observationJson.setSubject(subject);

if (entryCount%2 == 1) {
if (entryCount % 2 == 1) {
observationJson.setCategories(categoryConcepts1);
observationJson.setCode(codeableConceptField1);
observationJson.setCode_concept_id(codeableConceptId1);
@@ -287,7 +338,7 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
observationJson.setEffectiveDtm(effectiveDtm);

String observationDocument = ourMapperNonPrettyPrint.writeValueAsString(observationJson);
assertTrue(elasticsearchSvc.performIndex(OBSERVATION_INDEX, identifier,observationDocument, OBSERVATION_DOCUMENT_TYPE));
assertTrue(elasticsearchSvc.performIndex(OBSERVATION_INDEX, identifier, observationDocument, OBSERVATION_DOCUMENT_TYPE));

if (createdPatientObservationMap.containsKey(subject)) {
Map<String, List<Date>> observationCodeMap = createdPatientObservationMap.get(subject);
@@ -296,7 +347,7 @@ public class LastNElasticsearchSvcMultipleObservationsTest {
// Want dates to be sorted in descending order
observationDates.add(0, effectiveDtm);
// Only keep the three most recent dates for later check.
if(observationDates.size() > 3) {
if (observationDates.size() > 3) {
observationDates.remove(3);
}
} else {
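The test class above drives its queries through the two entry points this commit migrates to: executeLastN(searchParameterMap, limit), which returns only the matching Observation identifiers, and executeLastNWithAllFields(searchParameterMap, limit), which returns the indexed ObservationJson documents (the tests pass 3 or 100 as the integer argument). A minimal sketch of that call pattern follows; elasticsearchSvc stands for the same autowired lastn search service the tests use (its concrete type is not shown in these hunks), and the patient ID is a hypothetical placeholder.

// Sketch only: 'elasticsearchSvc' is assumed to be the autowired lastn search service
// from the surrounding test classes; "42" is a hypothetical patient ID.
SearchParameterMap map = new SearchParameterMap();
ReferenceParam subject = new ReferenceParam("Patient", "", "42");
map.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subject)));

// Identifier-only form, as asserted on throughout the testLastN* methods above.
List<String> lastNIds = elasticsearchSvc.executeLastN(map, 100);

// Full-document form, used where individual fields of the results are verified.
List<ObservationJson> lastNDocs = elasticsearchSvc.executeLastNWithAllFields(map, 100);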
@@ -5,14 +5,12 @@ import ca.uhn.fhir.jpa.search.lastn.json.CodeJson;
import ca.uhn.fhir.jpa.search.lastn.json.ObservationJson;
import ca.uhn.fhir.jpa.search.lastn.util.CodeSystemHash;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.TokenParam;
import ca.uhn.fhir.model.dstu2.resource.Observation;
import ca.uhn.fhir.rest.param.*;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import org.shadehapi.elasticsearch.action.search.SearchRequest;
import org.shadehapi.elasticsearch.action.search.SearchResponse;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.junit.*;
@@ -28,7 +26,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(classes = { TestElasticsearchConfig.class } )
@ContextConfiguration(classes = {TestElasticsearchConfig.class})
public class LastNElasticsearchSvcSingleObservationTest {

@Autowired
@@ -37,8 +35,6 @@ public class LastNElasticsearchSvcSingleObservationTest {
static ObjectMapper ourMapperNonPrettyPrint;

final String RESOURCEPID = "123";
// final String SUBJECTID = "4567";
// final String SUBJECTTYPEANDID = "Patient/4567";
final String SUBJECTID = "Patient/4567";
final Date EFFECTIVEDTM = new Date();
final String FIRSTCATEGORYTEXT = "Test Codeable Concept Field for first category";
@@ -87,12 +83,6 @@ public class LastNElasticsearchSvcSingleObservationTest {

}

// @Before
public void before() throws IOException {
elasticsearchSvc.deleteAllDocuments(IndexConstants.OBSERVATION_INDEX);
elasticsearchSvc.deleteAllDocuments(IndexConstants.CODE_INDEX);
}

@After
public void after() throws IOException {
elasticsearchSvc.deleteAllDocuments(IndexConstants.OBSERVATION_INDEX);
@@ -106,41 +96,20 @@ public class LastNElasticsearchSvcSingleObservationTest {

SearchParameterMap searchParameterMap = new SearchParameterMap();
ReferenceParam subjectParam = new ReferenceParam("Patient", "", SUBJECTID);
searchParameterMap.add("subject", subjectParam);
searchParameterMap.add(Observation.SP_SUBJECT, new ReferenceAndListParam().addAnd(new ReferenceOrListParam().addOr(subjectParam)));
TokenParam categoryParam = new TokenParam(CATEGORYFIRSTCODINGSYSTEM, FIRSTCATEGORYFIRSTCODINGCODE);
searchParameterMap.add("category", categoryParam);
searchParameterMap.add(Observation.SP_CATEGORY, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(categoryParam)));
TokenParam codeParam = new TokenParam(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE);
searchParameterMap.add("code", codeParam);
searchParameterMap.add(Observation.SP_CODE, new TokenAndListParam().addAnd(new TokenOrListParam().addOr(codeParam)));

// execute Observation ID search - Terms Aggregation
SearchRequest searchRequestIdsOnly = elasticsearchSvc.buildObservationTermsSearchRequest(1000, searchParameterMap, 3);
SearchResponse responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
List<ObservationJson> observationIdsOnly = elasticsearchSvc.buildObservationTermsResults(responseIds);
// execute Observation ID search
List<String> observationIdsOnly = elasticsearchSvc.executeLastN(searchParameterMap, 3);

assertEquals(1, observationIdsOnly.size());
ObservationJson observationIdOnly = observationIdsOnly.get(0);
assertEquals(RESOURCEPID, observationIdOnly.getIdentifier());
assertEquals(RESOURCEPID, observationIdsOnly.get(0));

// execute Observation ID search - Composite Aggregation
searchRequestIdsOnly = elasticsearchSvc.buildObservationCompositeSearchRequest(1000, searchParameterMap, 3);
responseIds = elasticsearchSvc.executeSearchRequest(searchRequestIdsOnly);
observationIdsOnly = elasticsearchSvc.buildObservationCompositeResults(responseIds);

assertEquals(1, observationIdsOnly.size());
observationIdOnly = observationIdsOnly.get(0);
assertEquals(RESOURCEPID, observationIdOnly.getIdentifier());

// execute full Observation search - Terms Aggregation
SearchRequest searchRequestAllFields = elasticsearchSvc.buildObservationAllFieldsTermsSearchRequest(1000, searchParameterMap, 3);
SearchResponse response = elasticsearchSvc.executeSearchRequest(searchRequestAllFields);
List<ObservationJson> observations = elasticsearchSvc.buildObservationTermsResults(response);

validateFullObservationSearch(observations);

// execute full Observation search - Composite Aggregation
searchRequestAllFields= elasticsearchSvc.buildObservationAllFieldsCompositeSearchRequest(1000, searchParameterMap, 3);
response = elasticsearchSvc.executeSearchRequest(searchRequestAllFields);
observations = elasticsearchSvc.buildObservationCompositeResults(response);
// execute Observation search for all search fields
List<ObservationJson> observations = elasticsearchSvc.executeLastNWithAllFields(searchParameterMap, 3);

validateFullObservationSearch(observations);
}
@@ -151,20 +120,19 @@ public class LastNElasticsearchSvcSingleObservationTest {
ObservationJson observation = observations.get(0);
assertEquals(RESOURCEPID, observation.getIdentifier());

// assertEquals(SUBJECTTYPEANDID, observation.getSubject());
assertEquals(SUBJECTID, observation.getSubject());
assertEquals(RESOURCEPID, observation.getIdentifier());
assertEquals(EFFECTIVEDTM, observation.getEffectiveDtm());
assertEquals(OBSERVATIONSINGLECODEID, observation.getCode_concept_id());

List<String> category_concept_text_values = observation.getCategory_concept_text();
assertEquals(3,category_concept_text_values.size());
assertEquals(3, category_concept_text_values.size());
assertEquals(FIRSTCATEGORYTEXT, category_concept_text_values.get(0));
assertEquals(SECONDCATEGORYTEXT, category_concept_text_values.get(1));
assertEquals(THIRDCATEGORYTEXT, category_concept_text_values.get(2));

List<List<String>> category_codings_systems = observation.getCategory_coding_system();
assertEquals(3,category_codings_systems.size());
assertEquals(3, category_codings_systems.size());
List<String> category_coding_systems = category_codings_systems.get(0);
assertEquals(3, category_coding_systems.size());
assertEquals(CATEGORYFIRSTCODINGSYSTEM, category_coding_systems.get(0));
@@ -275,9 +243,7 @@ public class LastNElasticsearchSvcSingleObservationTest {
assertEquals(String.valueOf(CodeSystemHash.hashCodeSystem(CODEFIRSTCODINGSYSTEM, CODEFIRSTCODINGCODE)), code_coding_code_system_hash);

// Retrieve all Observation codes
SearchRequest searchRequest = elasticsearchSvc.buildObservationCodesSearchRequest(1000);
SearchResponse response = elasticsearchSvc.executeSearchRequest(searchRequest);
List<CodeJson> codes = elasticsearchSvc.buildCodeResult(response);
List<CodeJson> codes = elasticsearchSvc.queryAllIndexedObservationCodes(1000);
assertEquals(1, codes.size());
CodeJson persistedObservationCode = codes.get(0);

@@ -289,7 +255,7 @@ public class LastNElasticsearchSvcSingleObservationTest {
List<String> persistedCodeCodingSystems = persistedObservationCode.getCoding_system();
// TODO: Temporary changes until find a solution for addressing Observation Code with multiple codings.
// assertEquals(3,persistedCodeCodingSystems.size());
assertEquals(1,persistedCodeCodingSystems.size());
assertEquals(1, persistedCodeCodingSystems.size());
assertEquals(CODEFIRSTCODINGSYSTEM, persistedCodeCodingSystems.get(0));
// assertEquals(CODESECONDCODINGSYSTEM, persistedCodeCodingSystems.get(1));
// assertEquals(CODETHIRDCODINGSYSTEM, persistedCodeCodingSystems.get(2));
@@ -324,7 +290,6 @@ public class LastNElasticsearchSvcSingleObservationTest {
private void createSingleObservation() throws IOException {
ObservationJson indexedObservation = new ObservationJson();
indexedObservation.setIdentifier(RESOURCEPID);
// indexedObservation.setSubject(SUBJECTTYPEANDID);
indexedObservation.setSubject(SUBJECTID);
indexedObservation.setEffectiveDtm(EFFECTIVEDTM);

@@ -1,53 +0,0 @@
package ca.uhn.fhir.jpa.search.lastn.config;

import ca.uhn.fhir.context.ConfigurationException;
//import ca.uhn.fhir.jpa.search.lastn.ElasticsearchV5SvcImpl;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import pl.allegro.tech.embeddedelasticsearch.EmbeddedElastic;
import pl.allegro.tech.embeddedelasticsearch.PopularProperties;

import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.TimeUnit;

//@Configuration
//@EnableJpaRepositories(entityManagerFactoryRef = "entityManagerFactory")
//@EnableTransactionManagement
public class TestElasticsearchV5Config {

private final String elasticsearchHost = "127.0.0.1";
private final String elasticsearchUserId = "";
private final String elasticsearchPassword = "";

private static final String ELASTIC_VERSION = "5.6.16";

/*
@Bean()
public ElasticsearchV5SvcImpl myElasticsearchSvc() throws IOException {
int elasticsearchPort = embeddedElasticSearch().getHttpPort();
return new ElasticsearchV5SvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
}

@Bean
public EmbeddedElastic embeddedElasticSearch() {
EmbeddedElastic embeddedElastic = null;
try {
embeddedElastic = EmbeddedElastic.builder()
.withElasticVersion(ELASTIC_VERSION)
.withSetting(PopularProperties.TRANSPORT_TCP_PORT, 0)
.withSetting(PopularProperties.HTTP_PORT, 0)
.withSetting(PopularProperties.CLUSTER_NAME, UUID.randomUUID())
.withStartTimeout(60, TimeUnit.SECONDS)
.build()
.start();
} catch (IOException | InterruptedException e) {
throw new ConfigurationException(e);
}

return embeddedElastic;
}
*/
}