Additional cleanup and test fixes.

This commit is contained in:
ianmarshall 2020-05-25 18:25:25 -04:00
parent 6ed724e01b
commit 1e554731bb
12 changed files with 282 additions and 404 deletions

View File

@ -1,183 +1,144 @@
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-elasticsearch-6</artifactId>
<version>1.0-SNAPSHOT</version>
<parent>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir</artifactId>
<version>5.1.0-SNAPSHOT</version>
<relativePath>../pom.xml</relativePath>
</parent>
<name>hapi-fhir-elasticsearch-6</name>
<!-- FIXME change it to the project's website -->
<url>http://www.example.com</url>
<artifactId>hapi-fhir-elasticsearch-6</artifactId>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
</properties>
<name>hapi-fhir-elasticsearch-6</name>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- Elasticsearch -->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>6.5.4</version>
<exclusions>
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.spullara.mustache.java</groupId>
<artifactId>compiler</artifactId>
</exclusion>
<exclusion>
<groupId>com.tdunning</groupId>
<artifactId>t-digest</artifactId>
</exclusion>
<exclusion>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
</exclusion>
<exclusion>
<groupId>net.sf.jopt-simple</groupId>
<artifactId>jopt-simple</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-sandbox</artifactId>
</exclusion>
<exclusion>
<groupId>org.elasticsearch</groupId>
<artifactId>jna</artifactId>
</exclusion>
<exclusion>
<groupId>org.hdrhistogram</groupId>
<artifactId>HdrHistogram</artifactId>
</exclusion>
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
</properties>
<build>
<pluginManagement><!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
<plugins>
<!-- clean lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#clean_Lifecycle -->
<plugin>
<artifactId>maven-clean-plugin</artifactId>
<version>3.1.0</version>
</plugin>
<!-- default lifecycle, jar packaging: see https://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_jar_packaging -->
<plugin>
<artifactId>maven-resources-plugin</artifactId>
<version>3.0.2</version>
</plugin>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
</plugin>
<plugin>
<artifactId>maven-surefire-plugin</artifactId>
<version>2.22.1</version>
</plugin>
<plugin>
<artifactId>maven-jar-plugin</artifactId>
<version>3.0.2</version>
</plugin>
<plugin>
<artifactId>maven-install-plugin</artifactId>
<version>2.5.2</version>
</plugin>
<plugin>
<artifactId>maven-deploy-plugin</artifactId>
<version>2.8.2</version>
</plugin>
<!-- site lifecycle, see https://maven.apache.org/ref/current/maven-core/lifecycles.html#site_Lifecycle -->
<plugin>
<artifactId>maven-site-plugin</artifactId>
<version>3.7.1</version>
</plugin>
<plugin>
<artifactId>maven-project-info-reports-plugin</artifactId>
<version>3.0.0</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>shaded6</shadedClassifierName> <!-- Any name that makes sense -->
<relocations>
<relocation>
<pattern>com.carrotsearch.hppc</pattern>
<shadedPattern>com.shadehapi.carrotsearch.hppc</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.logging.log4j</pattern>
<shadedPattern>org.shadehapi.apache.logging.log4j</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.lucene</pattern>
<shadedPattern>org.shadehapi.apache.lucene</shadedPattern>
</relocation>
<relocation>
<pattern>org.elasticsearch</pattern>
<shadedPattern>org.shadehapi.elasticsearch</shadedPattern>
</relocation>
<relocation>
<pattern>org.joda</pattern>
<shadedPattern>org.shadehapi.joda</shadedPattern>
</relocation>
</relocations>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
<scope>test</scope>
</dependency>
<!-- Elasticsearch -->
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>elasticsearch-rest-high-level-client</artifactId>
<version>6.5.4</version>
<exclusions>
<!-- The following all need to be excluded to avoid conflicts with Hibernate-Search -->
<exclusion>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
</exclusion>
<exclusion>
<groupId>com.fasterxml.jackson.dataformat</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>com.github.spullara.mustache.java</groupId>
<artifactId>compiler</artifactId>
</exclusion>
<exclusion>
<groupId>com.tdunning</groupId>
<artifactId>t-digest</artifactId>
</exclusion>
<exclusion>
<groupId>commons-codec</groupId>
<artifactId>commons-codec</artifactId>
</exclusion>
<exclusion>
<groupId>commons-logging</groupId>
<artifactId>commons-logging</artifactId>
</exclusion>
<exclusion>
<groupId>net.bytebuddy</groupId>
<artifactId>byte-buddy</artifactId>
</exclusion>
<exclusion>
<groupId>net.sf.jopt-simple</groupId>
<artifactId>jopt-simple</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>*</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-analyzers-common</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-backward-codecs</artifactId>
</exclusion>
<exclusion>
<groupId>org.apache.lucene</groupId>
<artifactId>lucene-sandbox</artifactId>
</exclusion>
<exclusion>
<groupId>org.elasticsearch</groupId>
<artifactId>jna</artifactId>
</exclusion>
<exclusion>
<groupId>org.hdrhistogram</groupId>
<artifactId>HdrHistogram</artifactId>
</exclusion>
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.1</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<shadedArtifactAttached>true</shadedArtifactAttached>
<shadedClassifierName>shaded6</shadedClassifierName> <!-- Any name that makes sense -->
<relocations>
<relocation>
<pattern>com.carrotsearch.hppc</pattern>
<shadedPattern>com.shadehapi.carrotsearch.hppc</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.logging.log4j</pattern>
<shadedPattern>org.shadehapi.apache.logging.log4j</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.lucene</pattern>
<shadedPattern>org.shadehapi.apache.lucene</shadedPattern>
</relocation>
<relocation>
<pattern>org.elasticsearch</pattern>
<shadedPattern>org.shadehapi.elasticsearch</shadedPattern>
</relocation>
<relocation>
<pattern>org.joda</pattern>
<shadedPattern>org.shadehapi.joda</shadedPattern>
</relocation>
</relocations>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>

View File

@ -145,6 +145,13 @@
<artifactId>hapi-fhir-validation-resources-r5</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-elasticsearch-6</artifactId>
<version>${project.version}</version>
<classifier>shaded6</classifier>
</dependency>
<dependency>
<groupId>net.ttddyy</groupId>
@ -587,18 +594,6 @@
<groupId>org.jetbrains</groupId>
<artifactId>annotations</artifactId>
</dependency>
<dependency>
<groupId>ca.uhn.hapi.fhir</groupId>
<artifactId>hapi-fhir-elasticsearch-6</artifactId>
<version>1.0-SNAPSHOT</version>
<classifier>shaded6</classifier>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
<version>42.2.9</version>
</dependency>
</dependencies>

View File

@ -224,7 +224,7 @@ public class SearchBuilder implements ISearchBuilder {
for (Map.Entry<String, List<List<IQueryParameterType>>> nextParamEntry : myParams.entrySet()) {
String nextParamName = nextParamEntry.getKey();
if (myParams.isLastN() && LastNParameterHelper.isLastNParameter(nextParamName, myContext)) {
// Skip parameters for Subject, Patient, Code and Category for LastN
// Skip parameters for Subject, Patient, Code and Category for LastN as these will be filtered by Elasticsearch
continue;
}
List<List<IQueryParameterType>> andOrParams = nextParamEntry.getValue();

View File

@ -44,6 +44,7 @@ import org.shadehapi.elasticsearch.search.sort.SortOrder;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.TreeSet;
import java.util.function.Function;
import static org.apache.commons.lang3.StringUtils.isBlank;
@ -134,7 +135,6 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
if (!createIndex(OBSERVATION_INDEX, observationMapping)) {
throw new RuntimeException("Failed to create observation index");
}
}
private void createCodeIndexIfMissing() throws IOException {
@ -182,23 +182,6 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}
@VisibleForTesting
boolean performIndex(String theIndexName, String theDocumentId, String theIndexDocument, String theDocumentType) throws IOException {
IndexResponse indexResponse = myRestHighLevelClient.index(createIndexRequest(theIndexName, theDocumentId, theIndexDocument, theDocumentType),
RequestOptions.DEFAULT);
return (indexResponse.getResult() == DocWriteResponse.Result.CREATED) || (indexResponse.getResult() == DocWriteResponse.Result.UPDATED);
}
private IndexRequest createIndexRequest(String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) {
IndexRequest request = new IndexRequest(theIndexName);
request.id(theDocumentId);
request.type(theDocumentType);
request.source(theObservationDocument, XContentType.JSON);
return request;
}
private boolean indexExists(String theIndexName) throws IOException {
GetIndexRequest request = new GetIndexRequest();
request.indices(theIndexName);
@ -209,90 +192,79 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
public List<String> executeLastN(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, Integer theMaxResultsToFetch) {
String OBSERVATION_IDENTIFIER_FIELD_NAME = "identifier";
String[] topHitsInclude = {OBSERVATION_IDENTIFIER_FIELD_NAME};
try {
List<SearchResponse> responses = buildAndExecuteSearch(theSearchParameterMap, theFhirContext, topHitsInclude);
List<String> observationIds = new ArrayList<>();
for (SearchResponse response : responses) {
Integer maxResultsToAdd = null;
if (theMaxResultsToFetch != null) {
maxResultsToAdd = theMaxResultsToFetch - observationIds.size();
}
observationIds.addAll(buildObservationList(response, ObservationJson::getIdentifier, theSearchParameterMap, theFhirContext, maxResultsToAdd));
}
return observationIds;
} catch (IOException theE) {
throw new InvalidRequestException("Unable to execute LastN request", theE);
}
return buildAndExecuteSearch(theSearchParameterMap, theFhirContext, topHitsInclude,
ObservationJson::getIdentifier, theMaxResultsToFetch);
}
private List<SearchResponse> buildAndExecuteSearch(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext,
String[] topHitsInclude) {
List<SearchResponse> responses = new ArrayList<>();
private <T> List<T> buildAndExecuteSearch(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext,
String[] topHitsInclude, Function<ObservationJson,T> setValue, Integer theMaxResultsToFetch) {
String patientParamName = LastNParameterHelper.getPatientParamName(theFhirContext);
String subjectParamName = LastNParameterHelper.getSubjectParamName(theFhirContext);
List<T> searchResults = new ArrayList<>();
if (theSearchParameterMap.containsKey(patientParamName)
|| theSearchParameterMap.containsKey(subjectParamName)) {
ArrayList<String> subjectReferenceCriteria = new ArrayList<>();
List<List<IQueryParameterType>> patientParams = new ArrayList<>();
if (theSearchParameterMap.get(patientParamName) != null) {
patientParams.addAll(theSearchParameterMap.get(patientParamName));
}
if (theSearchParameterMap.get(subjectParamName) != null) {
patientParams.addAll(theSearchParameterMap.get(subjectParamName));
}
for (List<? extends IQueryParameterType> nextSubjectList : patientParams) {
subjectReferenceCriteria.addAll(getReferenceValues(nextSubjectList));
}
for (String subject : subjectReferenceCriteria) {
for (String subject : getSubjectReferenceCriteria(patientParamName, subjectParamName, theSearchParameterMap)) {
if (theMaxResultsToFetch != null && searchResults.size() >= theMaxResultsToFetch) {
break;
}
SearchRequest myLastNRequest = buildObservationsSearchRequest(subject, theSearchParameterMap, theFhirContext,
createCompositeAggregationBuilder(theSearchParameterMap.getLastNMax(), topHitsInclude));
createObservationSubjectAggregationBuilder(theSearchParameterMap.getLastNMax(), topHitsInclude));
try {
SearchResponse lastnResponse = executeSearchRequest(myLastNRequest);
responses.add(lastnResponse);
searchResults.addAll(buildObservationList(lastnResponse, setValue, theSearchParameterMap, theFhirContext,
theMaxResultsToFetch));
} catch (IOException theE) {
throw new InvalidRequestException("Unable to execute LastN request", theE);
}
}
} else {
SearchRequest myLastNRequest = buildObservationsSearchRequest(theSearchParameterMap, theFhirContext, createObservationCodeAggregationBuilder(theSearchParameterMap.getLastNMax(), topHitsInclude));
SearchRequest myLastNRequest = buildObservationsSearchRequest(theSearchParameterMap, theFhirContext,
createObservationCodeAggregationBuilder(theSearchParameterMap.getLastNMax(), topHitsInclude));
try {
SearchResponse lastnResponse = executeSearchRequest(myLastNRequest);
responses.add(lastnResponse);
searchResults.addAll(buildObservationList(lastnResponse, setValue, theSearchParameterMap, theFhirContext,
theMaxResultsToFetch));
} catch (IOException theE) {
throw new InvalidRequestException("Unable to execute LastN request", theE);
}
}
return responses;
return searchResults;
}
@VisibleForTesting
List<ObservationJson> executeLastNWithAllFields(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) {
try {
List<SearchResponse> responses = buildAndExecuteSearch(theSearchParameterMap, theFhirContext, null);
List<ObservationJson> observationDocuments = new ArrayList<>();
for (SearchResponse response : responses) {
observationDocuments.addAll(buildObservationList(response, t -> t, theSearchParameterMap, theFhirContext, 100));
/**
 * Collects the subject reference criteria for a LastN search by gathering the
 * and/or parameter lists registered under the patient and subject parameter
 * names (patient first, then subject) and flattening each one through
 * {@link #getReferenceValues(List)}.
 *
 * @param thePatientParamName    name of the patient search parameter
 * @param theSubjectParamName    name of the subject search parameter
 * @param theSearchParameterMap  the incoming search parameters
 * @return the flattened list of subject reference values (may be empty)
 */
private List<String> getSubjectReferenceCriteria(String thePatientParamName, String theSubjectParamName, SearchParameterMap theSearchParameterMap) {
    List<List<IQueryParameterType>> subjectAndPatientParams = new ArrayList<>();
    List<List<IQueryParameterType>> paramsForPatient = theSearchParameterMap.get(thePatientParamName);
    if (paramsForPatient != null) {
        subjectAndPatientParams.addAll(paramsForPatient);
    }
    List<List<IQueryParameterType>> paramsForSubject = theSearchParameterMap.get(theSubjectParamName);
    if (paramsForSubject != null) {
        subjectAndPatientParams.addAll(paramsForSubject);
    }
    List<String> referenceCriteria = new ArrayList<>();
    for (List<? extends IQueryParameterType> nextOrList : subjectAndPatientParams) {
        referenceCriteria.addAll(getReferenceValues(nextOrList));
    }
    return referenceCriteria;
}
private TreeSet<String> getReferenceValues(List<? extends IQueryParameterType> referenceParams) {
TreeSet<String> referenceList = new TreeSet<>();
for (IQueryParameterType nextOr : referenceParams) {
if (nextOr instanceof ReferenceParam) {
ReferenceParam ref = (ReferenceParam) nextOr;
if (isBlank(ref.getChain())) {
referenceList.add(ref.getValue());
}
} else {
throw new IllegalArgumentException("Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
}
return observationDocuments;
} catch (IOException theE) {
throw new InvalidRequestException("Unable to execute LastN request", theE);
}
return referenceList;
}
@VisibleForTesting
List<CodeJson> queryAllIndexedObservationCodes() throws IOException {
SearchRequest codeSearchRequest = new SearchRequest(CODE_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
// Query
searchSourceBuilder.query(QueryBuilders.matchAllQuery());
searchSourceBuilder.size(1000);
codeSearchRequest.source(searchSourceBuilder);
SearchResponse codeSearchResponse = executeSearchRequest(codeSearchRequest);
return buildCodeResult(codeSearchResponse);
}
private CompositeAggregationBuilder createCompositeAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
private CompositeAggregationBuilder createObservationSubjectAggregationBuilder(int theMaxNumberObservationsPerCode, String[] theTopHitsInclude) {
CompositeValuesSourceBuilder<?> subjectValuesBuilder = new TermsValuesSourceBuilder("subject").field("subject");
List<CompositeValuesSourceBuilder<?>> compositeAggSubjectSources = new ArrayList();
compositeAggSubjectSources.add(subjectValuesBuilder);
@ -395,16 +367,6 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
return parsedTopHits.getHits().getHits();
}
private List<CodeJson> buildCodeResult(SearchResponse theSearchResponse) throws JsonProcessingException {
SearchHits codeHits = theSearchResponse.getHits();
List<CodeJson> codes = new ArrayList<>();
for (SearchHit codeHit : codeHits) {
CodeJson code = objectMapper.readValue(codeHit.getSourceAsString(), CodeJson.class);
codes.add(code);
}
return codes;
}
private SearchRequest buildObservationsSearchRequest(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext, AggregationBuilder theAggregationBuilder) {
SearchRequest searchRequest = new SearchRequest(OBSERVATION_INDEX);
SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder();
@ -453,23 +415,6 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
|| theSearchParameterMap.containsKey(LastNParameterHelper.getCodeParamName(theFhirContext)));
}
private List<String> getReferenceValues(List<? extends IQueryParameterType> referenceParams) {
ArrayList<String> referenceList = new ArrayList<>();
for (IQueryParameterType nextOr : referenceParams) {
if (nextOr instanceof ReferenceParam) {
ReferenceParam ref = (ReferenceParam) nextOr;
if (isBlank(ref.getChain())) {
referenceList.add(ref.getValue());
}
} else {
throw new IllegalArgumentException("Invalid token type (expecting ReferenceParam): " + nextOr.getClass());
}
}
return referenceList;
}
private void addCategoriesCriteria(BoolQueryBuilder theBoolQueryBuilder, SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) {
String categoryParamName = LastNParameterHelper.getCategoryParamName(theFhirContext);
if (theSearchParameterMap.containsKey(categoryParamName)) {
@ -603,6 +548,50 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
}
/**
 * Test helper: runs a LastN search and returns the full indexed documents
 * (identity mapping {@code t -> t}) rather than just identifiers, with no
 * top-hits field filter ({@code null} include list).
 * NOTE(review): the result count is hard-coded to 100 here — confirm that is
 * sufficient for the tests that call this.
 */
@VisibleForTesting
List<ObservationJson> executeLastNWithAllFields(SearchParameterMap theSearchParameterMap, FhirContext theFhirContext) {
return buildAndExecuteSearch(theSearchParameterMap, theFhirContext, null, t -> t, 100);
}
/**
 * Test helper: retrieves every code document currently stored in the code index.
 *
 * @return the deserialized code documents
 * @throws IOException if the search request fails
 */
@VisibleForTesting
List<CodeJson> queryAllIndexedObservationCodes() throws IOException {
    // Match-all query; result set is capped at 1000 documents.
    SearchSourceBuilder sourceBuilder = new SearchSourceBuilder();
    sourceBuilder.query(QueryBuilders.matchAllQuery());
    sourceBuilder.size(1000);
    SearchRequest codeSearchRequest = new SearchRequest(CODE_INDEX);
    codeSearchRequest.source(sourceBuilder);
    return buildCodeResult(executeSearchRequest(codeSearchRequest));
}
/**
 * Deserializes every hit of a code-index search response into a {@link CodeJson}.
 *
 * @param theSearchResponse the raw Elasticsearch response
 * @return one CodeJson per hit, in response order
 * @throws JsonProcessingException if a hit's source cannot be deserialized
 */
private List<CodeJson> buildCodeResult(SearchResponse theSearchResponse) throws JsonProcessingException {
    List<CodeJson> result = new ArrayList<>();
    for (SearchHit nextHit : theSearchResponse.getHits()) {
        result.add(objectMapper.readValue(nextHit.getSourceAsString(), CodeJson.class));
    }
    return result;
}
/**
 * Test helper: indexes (creates or overwrites) a single JSON document.
 *
 * @return true if Elasticsearch reports the document was created or updated
 * @throws IOException if the index request fails
 */
@VisibleForTesting
boolean performIndex(String theIndexName, String theDocumentId, String theIndexDocument, String theDocumentType) throws IOException {
    IndexRequest request = createIndexRequest(theIndexName, theDocumentId, theIndexDocument, theDocumentType);
    DocWriteResponse.Result result = myRestHighLevelClient.index(request, RequestOptions.DEFAULT).getResult();
    return result == DocWriteResponse.Result.CREATED || result == DocWriteResponse.Result.UPDATED;
}
/**
 * Builds an {@link IndexRequest} for the given JSON document, document id and
 * mapping type against the named index.
 */
private IndexRequest createIndexRequest(String theIndexName, String theDocumentId, String theObservationDocument, String theDocumentType) {
    IndexRequest indexRequest = new IndexRequest(theIndexName);
    indexRequest.type(theDocumentType);
    indexRequest.id(theDocumentId);
    indexRequest.source(theObservationDocument, XContentType.JSON);
    return indexRequest;
}
@VisibleForTesting
void deleteAllDocuments(String theIndexName) throws IOException {
DeleteByQueryRequest deleteByQueryRequest = new DeleteByQueryRequest(theIndexName);

View File

@ -131,13 +131,15 @@ public class TestUtil {
OneToOne oneToOne = nextField.getAnnotation(OneToOne.class);
boolean isOtherSideOfOneToManyMapping = oneToMany != null && isNotBlank(oneToMany.mappedBy());
boolean isOtherSideOfOneToOneMapping = oneToOne != null && isNotBlank(oneToOne.mappedBy());
boolean isField = nextField.getAnnotation(org.hibernate.search.annotations.Field.class) != null;
Validate.isTrue(
hasEmbedded ||
hasColumn ||
hasJoinColumn ||
isOtherSideOfOneToManyMapping ||
isOtherSideOfOneToOneMapping ||
hasEmbeddedId, "Non-transient has no @Column or @JoinColumn or @EmbeddedId: " + nextField);
hasEmbeddedId ||
isField, "Non-transient has no @Column or @JoinColumn or @EmbeddedId: " + nextField);
}

View File

@ -102,22 +102,16 @@ public class TestR4Config extends BaseJavaConfigR4 {
};
retVal.setDriver(new org.h2.Driver());
// retVal.setDriver(new org.postgresql.Driver());
retVal.setUrl("jdbc:h2:mem:testdb_r4");
// retVal.setUrl("jdbc:postgresql://localhost:5432/cdr");
retVal.setMaxWaitMillis(10000);
retVal.setUsername("");
// retVal.setUsername("cdr");
retVal.setPassword("");
// retVal.setPassword("SmileCDR");
retVal.setMaxTotal(ourMaxThreads);
SLF4JLogLevel level = SLF4JLogLevel.INFO;
DataSource dataSource = ProxyDataSourceBuilder
.create(retVal)
// .logQueryBySlf4j(level, "SQL")
.logSlowQueryBySlf4j(10, TimeUnit.SECONDS)
// .countQuery(new ThreadQueryCountHolder())
.beforeQuery(new BlockLargeNumbersOfParamsListener())
.afterQuery(captureQueriesListener())
.afterQuery(new CurrentThreadCaptureQueriesListener())
@ -149,7 +143,6 @@ public class TestR4Config extends BaseJavaConfigR4 {
extraProperties.put("hibernate.show_sql", "false");
extraProperties.put("hibernate.hbm2ddl.auto", "update");
extraProperties.put("hibernate.dialect", H2Dialect.class.getName());
// extraProperties.put("hibernate.dialect", org.hibernate.dialect.PostgreSQL95Dialect.class.getName());
extraProperties.put("hibernate.search.model_mapping", ca.uhn.fhir.jpa.search.LuceneSearchMappingFactory.class.getName());
extraProperties.put("hibernate.search.default.directory_provider", "local-heap");
extraProperties.put("hibernate.search.lucene_version", "LUCENE_CURRENT");

View File

@ -24,9 +24,7 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
private static final String ELASTIC_VERSION = "6.5.4";
protected final String elasticsearchHost = "localhost";
protected final String elasticsearchUserId = "";
// protected final String elasticsearchUserId = "elastic";
protected final String elasticsearchPassword = "";
// protected final String elasticsearchPassword = "changeme";
@Override
@ -36,7 +34,6 @@ public class TestR4ConfigWithElasticSearch extends TestR4Config {
// Force elasticsearch to start first
int httpPort = embeddedElasticSearch().getHttpPort();
// int httpPort = 9301;
ourLog.info("ElasticSearch started on port: {}", httpPort);
new ElasticsearchHibernatePropertiesBuilder()

View File

@ -10,7 +10,6 @@ public class TestR4ConfigWithElasticsearchClient extends TestR4ConfigWithElastic
@Bean()
public ElasticsearchSvcImpl myElasticsearchSvc() {
int elasticsearchPort = embeddedElasticSearch().getHttpPort();
// int elasticsearchPort = 9301;
return new ElasticsearchSvcImpl(elasticsearchHost, elasticsearchPort, elasticsearchUserId, elasticsearchPassword);
}

View File

@ -10,16 +10,18 @@ import javax.persistence.*;
@Entity
@Indexed(index = "code_index")
@Embeddable
@Table(name = "HFJ_SPIDX_LASTN_CODEABLE_CONCEPT")
@Table(name = "HFJ_SPIDX_LASTN_CODE_CONCEPT")
public class ObservationIndexedCodeCodeableConceptEntity {
@Id
public static final int MAX_LENGTH = 200;
@Id
@DocumentId(name = "codeable_concept_id")
@Column(name="CODEABLE_CONCEPT_ID")
@Column(name="CODEABLE_CONCEPT_ID", length = MAX_LENGTH)
private String myCodeableConceptId;
@Field(name = "text")
@Column(name = "CODEABLE_CONCEPT_TEXT", nullable = true)
@Column(name = "CODEABLE_CONCEPT_TEXT", nullable = true, length = MAX_LENGTH)
private String myCodeableConceptText;
// TODO: Make coding a Collection. Need to first figure out how to maintain this over time.

View File

@ -11,6 +11,8 @@ import javax.persistence.*;
@Table(name = "HFJ_SPIDX_LASTN_CODING")
public class ObservationIndexedCodeCodingEntity {
public static final int MAX_LENGTH = 200;
// TODO: Fix this to allow multiple codings for observation code
// @Id
// @SequenceGenerator(name = "SEQ_CODING_FIELD", sequenceName = "SEQ_CODING_FIELD")
@ -18,7 +20,7 @@ public class ObservationIndexedCodeCodingEntity {
// private Long myId;
@Id
@Column(name="CODEABLE_CONCEPT_ID")
@Column(name="CODEABLE_CONCEPT_ID", length = MAX_LENGTH)
private String myCodeableConceptId;
@Field (name = "code", analyze = Analyze.NO)

View File

@ -13,6 +13,8 @@ import java.util.*;
@Indexed(index = "observation_index")
public class ObservationIndexedSearchParamLastNEntity {
public static final int MAX_LENGTH = 200;
@Id
@SequenceGenerator(name = "SEQ_LASTN", sequenceName = "SEQ_LASTN")
@GeneratedValue(strategy = GenerationType.AUTO, generator = "SEQ_LASTN")
@ -20,7 +22,7 @@ public class ObservationIndexedSearchParamLastNEntity {
private Long myId;
@Field(name = "subject", analyze = Analyze.NO)
@Column(name = "LASTN_SUBJECT_ID", nullable = true)
@Column(name = "LASTN_SUBJECT_ID", nullable = true, length = MAX_LENGTH)
private String mySubject;
@ManyToOne(fetch = FetchType.LAZY)
@ -29,7 +31,7 @@ public class ObservationIndexedSearchParamLastNEntity {
private ObservationIndexedCodeCodeableConceptEntity myObservationCode;
@Field(name = "codeconceptid", analyze = Analyze.NO)
@Column(name = "CODEABLE_CONCEPT_ID", nullable = false, updatable = false, insertable = false)
@Column(name = "CODEABLE_CONCEPT_ID", nullable = false, updatable = false, insertable = false, length = MAX_LENGTH)
private String myCodeNormalizedId;
@IndexedEmbedded(depth = 2, prefix = "categoryconcept")
@ -42,7 +44,7 @@ public class ObservationIndexedSearchParamLastNEntity {
private Date myEffectiveDtm;
@DocumentId(name = "identifier")
@Column(name = "RESOURCE_IDENTIFIER", nullable = false)
@Column(name = "RESOURCE_IDENTIFIER", nullable = false, length = MAX_LENGTH)
private String myIdentifier;
public ObservationIndexedSearchParamLastNEntity() {

View File

@ -165,12 +165,6 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
return new ResourceLinkExtractor();
}
@Override
public PathAndRef extractReferenceLinkFromResource(IBase theValue, String thePath) {
ResourceLinkExtractor extractor = new ResourceLinkExtractor();
return extractor.get(theValue, thePath);
}
private class ResourceLinkExtractor implements IExtractor<PathAndRef> {
private PathAndRef myPathAndRef = null;
@ -249,16 +243,10 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
}
}
private <T extends BaseResourceIndexedSearchParam> List<String> extractParamsAsQueryTokens(RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor<T> theExtractor) {
SearchParamSet<T> params = new SearchParamSet<>();
extractSearchParam(theSearchParam, theResource, theExtractor, params);
return toStringList(params);
}
private <T extends BaseResourceIndexedSearchParam> List<String> toStringList(SearchParamSet<T> theParams) {
return theParams.stream()
.map(param -> param.toQueryParameterType().getValueAsQueryToken(myContext))
.collect(Collectors.toList());
/**
 * Extracts the path-and-reference pair for a single value at the given path,
 * delegating to a fresh {@link ResourceLinkExtractor}.
 */
@Override
public PathAndRef extractReferenceLinkFromResource(IBase theValue, String thePath) {
    return new ResourceLinkExtractor().get(theValue, thePath);
}
@Override
@ -309,6 +297,18 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
.collect(Collectors.toList());
}
/**
 * Runs the given extractor over the resource and renders each extracted
 * indexed search param as a query-token string.
 */
private <T extends BaseResourceIndexedSearchParam> List<String> extractParamsAsQueryTokens(RuntimeSearchParam theSearchParam, IBaseResource theResource, IExtractor<T> theExtractor) {
    SearchParamSet<T> extracted = new SearchParamSet<>();
    extractSearchParam(theSearchParam, theResource, theExtractor, extracted);
    return toStringList(extracted);
}
/**
 * Renders each indexed search param in the set as its query-token string
 * representation, using this extractor's FhirContext.
 */
private <T extends BaseResourceIndexedSearchParam> List<String> toStringList(SearchParamSet<T> theParams) {
    List<String> tokens = new ArrayList<>();
    for (T nextParam : theParams) {
        tokens.add(nextParam.toQueryParameterType().getValueAsQueryToken(myContext));
    }
    return tokens;
}
@Override
public SearchParamSet<BaseResourceIndexedSearchParam> extractSearchParamTokens(IBaseResource theResource) {
IExtractor<BaseResourceIndexedSearchParam> extractor = createTokenExtractor(theResource);
@ -734,6 +734,20 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
return tokenTextIndexingEnabledForSearchParam(myModelConfig, theSearchParam);
}
/**
 * Indexes a CodeableConcept value: one token index entry per contained coding,
 * plus (when :text indexing is enabled for this param) a string index entry
 * for the concept's display text.
 */
private void addToken_CodeableConcept(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
    for (IBase nextCoding : getCodingsFromCodeableConcept(theValue)) {
        addToken_Coding(theResourceType, theParams, theSearchParam, nextCoding);
    }
    if (!shouldIndexTextComponentOfToken(theSearchParam)) {
        return;
    }
    String displayText = getDisplayTextFromCodeableConcept(theValue);
    if (isNotBlank(displayText)) {
        createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, displayText);
    }
}
@Override
public List<IBase> getCodingsFromCodeableConcept(IBase theValue) {
String nextType = BaseSearchParamExtractor.this.toRootTypeName(theValue);
@ -754,20 +768,6 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
}
}
// Indexes a CodeableConcept: one token entry per contained Coding, plus an
// optional string index on the concept's display text.
// NOTE(review): this method appears verbatim twice in this file (diff residue?);
// one copy should likely be removed — confirm before merging.
private void addToken_CodeableConcept(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
List<IBase> codings = getCodingsFromCodeableConcept(theValue);
for (IBase nextCoding : codings) {
addToken_Coding(theResourceType, theParams, theSearchParam, nextCoding);
}
// Text component indexing is gated per search param by configuration.
if (shouldIndexTextComponentOfToken(theSearchParam)) {
String text = getDisplayTextFromCodeableConcept(theValue);
if (isNotBlank(text)) {
createStringIndexIfNotBlank(theResourceType, theParams, theSearchParam, text);
}
}
}
private void addToken_Coding(String theResourceType, Set<BaseResourceIndexedSearchParam> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
ResourceIndexedSearchParamToken resourceIndexedSearchParamToken = createSearchParamForCoding(theResourceType, theSearchParam, theValue);
if (resourceIndexedSearchParamToken != null) {
@ -822,61 +822,6 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
}
}
/**
 * Indexes a Period element as a date range param. Either boundary may be
 * absent (open-ended period); at least one must be present for an entry
 * to be created.
 *
 * @param theResourceType the resource type being indexed
 * @param theParams       the target set receiving the new index entry
 * @param theSearchParam  the search parameter being indexed
 * @param theValue        the Period element
 */
private void addDate_Period(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
	// Capture both boundaries as typed dates and as their raw string form.
	String startAsString = extractValueAsString(myPeriodStartValueChild, theValue);
	String endAsString = extractValueAsString(myPeriodEndValueChild, theValue);
	Date start = extractValueAsDate(myPeriodStartValueChild, theValue);
	Date end = extractValueAsDate(myPeriodEndValueChild, theValue);

	// A fully empty period produces no index entry.
	if (start == null && end == null) {
		return;
	}

	theParams.add(new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), start, startAsString, end, endAsString, startAsString));
}
/**
 * Indexes a Timing element as a single date range param spanning all of its
 * event datetimes, widened by Timing.repeat.bounds when that bound is a Period.
 *
 * @param theResourceType the resource type being indexed
 * @param theParams       the target set receiving the new index entry
 * @param theSearchParam  the search parameter being indexed
 * @param theValue        the Timing element
 */
private void addDate_Timing(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
	List<IPrimitiveType<Date>> values = extractValuesAsFhirDates(myTimingEventValueChild, theValue);

	// TreeSet keeps the collected dates sorted so first()/last() give the range.
	TreeSet<Date> dates = new TreeSet<>();
	String firstValue = null;
	String finalValue = null;
	for (IPrimitiveType<Date> nextEvent : values) {
		if (nextEvent.getValue() != null) {
			dates.add(nextEvent.getValue());
			if (firstValue == null) {
				firstValue = nextEvent.getValueAsString();
			}
			finalValue = nextEvent.getValueAsString();
		}
	}

	// Widen the range using Timing.repeat.bounds when it is a Period.
	Optional<IBase> repeat = myTimingRepeatValueChild.getAccessor().getFirstValueOrNull(theValue);
	if (repeat.isPresent()) {
		Optional<IBase> bounds = myTimingRepeatBoundsValueChild.getAccessor().getFirstValueOrNull(repeat.get());
		if (bounds.isPresent()) {
			String boundsType = toRootTypeName(bounds.get());
			if ("Period".equals(boundsType)) {
				Date start = extractValueAsDate(myPeriodStartValueChild, bounds.get());
				Date end = extractValueAsDate(myPeriodEndValueChild, bounds.get());
				// FIX: guard against null boundaries. An open-ended Period may be
				// missing either side, and TreeSet (natural ordering) throws
				// NullPointerException on add(null).
				if (start != null) {
					dates.add(start);
					//TODO Check if this logic is valid. Does the start of the first period indicate a lower bound??
					if (firstValue == null) {
						firstValue = extractValueAsString(myPeriodStartValueChild, bounds.get());
					}
				}
				if (end != null) {
					dates.add(end);
					finalValue = extractValueAsString(myPeriodEndValueChild, bounds.get());
				}
			}
		}
	}

	if (!dates.isEmpty()) {
		ResourceIndexedSearchParamDate nextEntity = new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), dates.first(), firstValue, dates.last(), finalValue, firstValue);
		theParams.add(nextEntity);
	}
}
private void addNumber_Duration(String theResourceType, Set<ResourceIndexedSearchParamNumber> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
String system = extractValueAsString(myDurationSystemValueChild, theValue);
String code = extractValueAsString(myDurationCodeValueChild, theValue);
@ -1060,15 +1005,6 @@ public abstract class BaseSearchParamExtractor implements ISearchParamExtractor
return elementDefinition.getName();
}
/**
 * Indexes a primitive datetime element as a degenerate date range param
 * (low == high == the element's value). Elements with no value are skipped.
 *
 * @param theResourceType the resource type being indexed
 * @param theParams       the target set receiving the new index entry
 * @param theSearchParam  the search parameter being indexed
 * @param theValue        a primitive datetime element; cast is unchecked by design
 */
@SuppressWarnings("unchecked")
private void addDateTimeTypes(String theResourceType, Set<ResourceIndexedSearchParamDate> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
	IPrimitiveType<Date> dateElement = (IPrimitiveType<Date>) theValue;
	Date typedValue = dateElement.getValue();
	if (typedValue == null) {
		// Nothing to index for an empty datetime element.
		return;
	}
	String rawValue = dateElement.getValueAsString();
	theParams.add(new ResourceIndexedSearchParamDate(myPartitionSettings, theResourceType, theSearchParam.getName(), typedValue, rawValue, typedValue, rawValue, rawValue));
}
private void addUri_Uri(String theResourceType, Set<ResourceIndexedSearchParamUri> theParams, RuntimeSearchParam theSearchParam, IBase theValue) {
IPrimitiveType<?> value = (IPrimitiveType<?>) theValue;
String valueAsString = value.getValueAsString();