Mb auto complete code (#3330)

New autocomplete extension to ValueSet $expand with contextDirection=existing

parent 6982c4d680
commit 6b15754b4a

@@ -25,7 +25,7 @@ public final class Msg {
/**
* IMPORTANT: Please update the following comment after you add a new code
* Last code value: 2019
* Last code value: 2023
*/

private Msg() {}

@@ -0,0 +1,6 @@
---
type: add
issue: 3330
title: "The ValueSet $expand operation now supports an extension. When invoked with contextDirection=existing,
the query will evaluate the context as a search parameter reference and respond with
matching codings from the repository."

@@ -3,7 +3,7 @@
The HAPI JPA Server supports optional indexing via Hibernate Search when configured to use Lucene or Elasticsearch.
This is required to support the `_content`, or `_text` search parameters.

# Experimental Advanced Lucene/Elasticsearch Indexing
# Experimental Extended Lucene/Elasticsearch Indexing

Additional indexing is implemented for simple search parameters of type token, string, and reference.
These implement the basic search, as well as several modifiers:

@@ -11,7 +11,7 @@ This **experimental** feature is enabled via the `setAdvancedLuceneIndexing()` p

## String search

The Advanced Lucene string search indexing supports the default search, as well as the modifiers defined in https://www.hl7.org/fhir/search.html#string.
The Extended Lucene string search indexing supports the default search, as well as the modifiers defined in https://www.hl7.org/fhir/search.html#string.
- Default searching matches by prefix, insensitive to case or accents
- `:exact` matches the entire string, matching case and accents
- `:contains` extends the default search to match any substring of the text

@@ -21,10 +21,25 @@ The Advanced Lucene string search indexing supports the default search, as well

## Token search

The Advance Lucene indexing supports the default token search by code, system, or system+code,
The Extended Lucene Indexing supports the default token search by code, system, or system+code,
as well as with the `:text` modifier.
The `:text` modifier provides the same Simple Query Syntax used by string `:text` searches.
See https://www.hl7.org/fhir/search.html#token.

## ValueSet autocomplete extension

The Extended Lucene Indexing supports an extension of the `$expand` operation on ValueSet with
a new `contextDirection` value of `existing`. In this mode, the `context` parameter is interpreted
as a SearchParameter reference (by resource type and code), and the `filter` is interpreted as a
query token. The expansion will contain the most frequent `Coding` values matching the filter.
E.g. the query

    GET /ValueSet/$expand?contextDirection=existing&context=Observation.code:text&filter=press

will return a ValueSet containing the most common values indexed under `Observation.code` whose
display text contains a word starting with "press", such as `http://loinc.org|8478-0` - "Mean blood pressure".
This extension is only valid at the type level, and requires that Extended Lucene Indexing be enabled.

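For illustration only (not part of this commit): a minimal sketch of invoking the new extension from Java with the HAPI FHIR generic client. The class name, server base URL, and parameter values below are assumptions.

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.ValueSet;

public class AutocompleteExpandExample {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		// Hypothetical base URL; any R4 endpoint with Extended Lucene Indexing enabled will do.
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		ValueSet expanded = client.operation()
			.onType(ValueSet.class)
			.named("$expand")
			.withParameter(Parameters.class, "contextDirection", new StringType("existing"))
			.andParameter("context", new StringType("Observation.code:text"))
			.andParameter("filter", new StringType("press"))
			.returnResourceType(ValueSet.class)
			.execute();

		// Each expansion entry carries the system, code, and display of a frequently used Coding
		// whose display text matches the filter.
		expanded.getExpansion().getContains().forEach(c ->
			System.out.println(c.getSystem() + "|" + c.getCode() + " - " + c.getDisplay()));
	}
}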
|
|
@ -527,6 +527,15 @@
|
|||
<artifactId>jena-core</artifactId>
|
||||
<scope>compile</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.jayway.jsonpath</groupId>
|
||||
<artifactId>json-path-assert</artifactId>
|
||||
<scope>test</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.jayway.jsonpath</groupId>
|
||||
<artifactId>json-path</artifactId>
|
||||
</dependency>
|
||||
</dependencies>
|
||||
|
||||
|
||||
|
|
|
@ -24,11 +24,13 @@ import ca.uhn.fhir.context.FhirContext;
|
|||
import ca.uhn.fhir.context.RuntimeSearchParam;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.dao.data.IForcedIdDao;
|
||||
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneSearchBuilder;
|
||||
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneIndexExtractor;
|
||||
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneClauseBuilder;
|
||||
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneIndexExtractor;
|
||||
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneSearchBuilder;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.search.ExtendedLuceneIndexData;
|
||||
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
|
||||
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteSearch;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
|
@ -72,7 +74,7 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
|
|||
private ISearchParamRegistry mySearchParamRegistry;
|
||||
@Autowired
|
||||
private DaoConfig myDaoConfig;
|
||||
private ExtendedLuceneSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedLuceneSearchBuilder();
|
||||
final private ExtendedLuceneSearchBuilder myAdvancedIndexQueryBuilder = new ExtendedLuceneSearchBuilder();
|
||||
|
||||
private Boolean ourDisabled;
|
||||
|
||||
|
@ -228,4 +230,14 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
|
|||
return doSearch(theResourceName, theParams, null);
|
||||
}
|
||||
|
||||
@Transactional()
|
||||
@Override
|
||||
public IBaseResource tokenAutocompleteValueSetSearch(ValueSetAutocompleteOptions theOptions) {
|
||||
|
||||
ValueSetAutocompleteSearch autocomplete = new ValueSetAutocompleteSearch(myFhirContext, getSearchSession());
|
||||
|
||||
IBaseResource result = autocomplete.search(theOptions);
|
||||
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -24,10 +24,11 @@ import java.util.List;
|
|||
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.jpa.model.search.ExtendedLuceneIndexData;
|
||||
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
||||
public interface IFulltextSearchSvc {
|
||||
|
@ -43,6 +44,13 @@ public interface IFulltextSearchSvc {
|
|||
*/
|
||||
List<ResourcePersistentId> search(String theResourceName, SearchParameterMap theParams);
|
||||
|
||||
/**
|
||||
* Autocomplete search for NIH $expand contextDirection=existing
|
||||
* @param theOptions operation options
|
||||
* @return a ValueSet with the search hits as the expansion.
|
||||
*/
|
||||
IBaseResource tokenAutocompleteValueSetSearch(ValueSetAutocompleteOptions theOptions);
|
||||
|
||||
List<ResourcePersistentId> everything(String theResourceName, SearchParameterMap theParams, RequestDetails theRequest);
|
||||
|
||||
boolean isDisabled();
|
||||
|
@ -60,4 +68,5 @@ public interface IFulltextSearchSvc {
|
|||
* @param theEntity the fully populated ResourceTable entity
|
||||
*/
|
||||
void reindex(ResourceTable theEntity);
|
||||
|
||||
}
|
||||
|
|
|
@ -112,7 +112,6 @@ public class ExtendedLuceneClauseBuilder {
|
|||
return;
|
||||
}
|
||||
for (List<? extends IQueryParameterType> nextAnd : theAndOrTerms) {
|
||||
String indexFieldPrefix = "sp." + theSearchParamName + ".token";
|
||||
|
||||
ourLog.debug("addTokenUnmodifiedSearch {} {}", theSearchParamName, nextAnd);
|
||||
List<? extends PredicateFinalStep> clauses = nextAnd.stream().map(orTerm -> {
|
||||
|
@ -120,19 +119,19 @@ public class ExtendedLuceneClauseBuilder {
|
|||
TokenParam token = (TokenParam) orTerm;
|
||||
if (StringUtils.isBlank(token.getSystem())) {
|
||||
// bare value
|
||||
return myPredicateFactory.match().field(indexFieldPrefix + ".code").matching(token.getValue());
|
||||
return myPredicateFactory.match().field("sp." + theSearchParamName + ".token" + ".code").matching(token.getValue());
|
||||
} else if (StringUtils.isBlank(token.getValue())) {
|
||||
// system without value
|
||||
return myPredicateFactory.match().field(indexFieldPrefix + ".system").matching(token.getSystem());
|
||||
return myPredicateFactory.match().field("sp." + theSearchParamName + ".token" + ".system").matching(token.getSystem());
|
||||
} else {
|
||||
// system + value
|
||||
return myPredicateFactory.match().field(indexFieldPrefix + ".code-system").matching(token.getValueAsQueryToken(this.myFhirContext));
|
||||
return myPredicateFactory.match().field(getTokenSystemCodeFieldPath(theSearchParamName)).matching(token.getValueAsQueryToken(this.myFhirContext));
|
||||
}
|
||||
} else if (orTerm instanceof StringParam) {
|
||||
// MB I don't quite understand why FhirResourceDaoR4SearchNoFtTest.testSearchByIdParamWrongType() uses String but here we are
|
||||
StringParam string = (StringParam) orTerm;
|
||||
// treat a string as a code with no system (like _id)
|
||||
return myPredicateFactory.match().field(indexFieldPrefix + ".code").matching(string.getValue());
|
||||
return myPredicateFactory.match().field("sp." + theSearchParamName + ".token" + ".code").matching(string.getValue());
|
||||
} else {
|
||||
throw new IllegalArgumentException(Msg.code(1089) + "Unexpected param type for token search-param: " + orTerm.getClass().getName());
|
||||
}
|
||||
|
@ -144,6 +143,11 @@ public class ExtendedLuceneClauseBuilder {
|
|||
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public static String getTokenSystemCodeFieldPath(@Nonnull String theSearchParamName) {
|
||||
return "sp." + theSearchParamName + ".token" + ".code-system";
|
||||
}
|
||||
|
||||
public void addStringTextSearch(String theSearchParamName, List<List<IQueryParameterType>> stringAndOrTerms) {
|
||||
if (CollectionUtils.isEmpty(stringAndOrTerms)) {
|
||||
return;
|
||||
|
|
|
@ -61,9 +61,8 @@ public class ExtendedLuceneIndexExtractor {
|
|||
|
||||
if (!theNewParams.myLinks.isEmpty()) {
|
||||
|
||||
// awkwardly, links are shared between different search params if they use the same path,
|
||||
// awkwardly, links are indexed by jsonpath, not by search param.
|
||||
// so we re-build the linkage.
|
||||
// WIPMB is this the right design? Or should we follow JPA and share these?
|
||||
Map<String, List<String>> linkPathToParamName = new HashMap<>();
|
||||
for (String nextParamName : theNewParams.getPopulatedResourceLinkParameters()) {
|
||||
RuntimeSearchParam sp = myParams.get(nextParamName);
|
||||
|
|
|
@ -1,20 +1,29 @@
|
|||
/**
|
||||
* Extended fhir indexing for Hibernate Search using Lucene/Elasticsearch.
|
||||
*
|
||||
* <h1>Extended fhir indexing for Hibernate Search using Lucene/Elasticsearch.
|
||||
* <p>
|
||||
* By default, Lucene indexing only provides support for _text, and _content search parameters using
|
||||
* {@link ca.uhn.fhir.jpa.model.entity.ResourceTable#myNarrativeText} and
|
||||
* {@link ca.uhn.fhir.jpa.model.entity.ResourceTable#myContentText}.
|
||||
*
|
||||
* Both {@link ca.uhn.fhir.jpa.search.builder.SearchBuilder} and {@link ca.uhn.fhir.jpa.dao.LegacySearchBuilder} delegate the
|
||||
* search to {@link ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl} when active.
|
||||
* The fulltext search runs first and interprets any search parameters it understands, returning a pid list.
|
||||
* This pid list is used as a narrowing where clause against the remaining unprocessed search parameters.
|
||||
*
|
||||
* This package extends this search to support token, string, and reference parameters via {@link ca.uhn.fhir.jpa.model.entity.ResourceTable#myLuceneIndexData}.
|
||||
* When active, the extracted search parameters which are written to the HFJ_SPIDX_* tables are also written to the Lucene index document.
|
||||
* For now, we use the existing JPA index entities to populate the {@link ca.uhn.fhir.jpa.model.search.ExtendedLuceneIndexData}
|
||||
* in {@link ca.uhn.fhir.jpa.dao.search.ExtendedLuceneIndexExtractor#extract(ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams)} ()}
|
||||
*
|
||||
* <h2>Implementation</h2>
|
||||
* Both {@link ca.uhn.fhir.jpa.search.builder.SearchBuilder} and {@link ca.uhn.fhir.jpa.dao.LegacySearchBuilder} delegate the
|
||||
* search to {@link ca.uhn.fhir.jpa.dao.FulltextSearchSvcImpl#doSearch} when active.
|
||||
* The fulltext search runs first and interprets any search parameters it understands, returning a pid list.
|
||||
* This pid list is used as a narrowing where clause against the remaining unprocessed search parameters in a jdbc query.
|
||||
* The actual queries for the different search types (e.g. token, string, modifiers, etc.) are
|
||||
* generated in {@link ca.uhn.fhir.jpa.dao.search.ExtendedLuceneSearchBuilder}.
|
||||
*
|
||||
* <h2>Operation</h2>
|
||||
* During startup, Hibernate Search uses {@link ca.uhn.fhir.jpa.model.search.SearchParamTextPropertyBinder} to generate a schema.
|
||||
*
|
||||
* @see ca.uhn.fhir.jpa.model.search.HibernateSearchIndexWriter
|
||||
* @see ca.uhn.fhir.jpa.model.search.ExtendedLuceneIndexData
|
||||
* @see ca.uhn.fhir.jpa.model.search.HibernateSearchIndexWriter
|
||||
* @see ca.uhn.fhir.jpa.dao.search.ExtendedLuceneSearchBuilder
|
||||
* @see ca.uhn.fhir.jpa.model.search.SearchParamTextPropertyBinder
|
||||
*
|
||||
* Activated by {@link ca.uhn.fhir.jpa.api.config.DaoConfig#setAdvancedLuceneIndexing(boolean)}.
|
||||
*/
|
||||
|
|
|
@ -29,7 +29,9 @@ import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
|||
import ca.uhn.fhir.jpa.api.dao.DaoRegistry;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoValueSet;
|
||||
import ca.uhn.fhir.jpa.config.BaseConfig;
|
||||
import ca.uhn.fhir.jpa.dao.IFulltextSearchSvc;
|
||||
import ca.uhn.fhir.jpa.model.util.JpaConstants;
|
||||
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermReadSvc;
|
||||
import ca.uhn.fhir.rest.annotation.IdParam;
|
||||
import ca.uhn.fhir.rest.annotation.Operation;
|
||||
|
@ -65,6 +67,8 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
|
|||
@Autowired
|
||||
@Qualifier(BaseConfig.JPA_VALIDATION_SUPPORT_CHAIN)
|
||||
private ValidationSupportChain myValidationSupportChain;
|
||||
@Autowired
|
||||
private IFulltextSearchSvc myFulltextSearch;
|
||||
|
||||
public void setDaoConfig(DaoConfig theDaoConfig) {
|
||||
myDaoConfig = theDaoConfig;
|
||||
|
@ -90,6 +94,8 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
|
|||
@OperationParam(name = "url", min = 0, max = 1, typeName = "uri") IPrimitiveType<String> theUrl,
|
||||
@OperationParam(name = "valueSetVersion", min = 0, max = 1, typeName = "string") IPrimitiveType<String> theValueSetVersion,
|
||||
@OperationParam(name = "filter", min = 0, max = 1, typeName = "string") IPrimitiveType<String> theFilter,
|
||||
@OperationParam(name = "context", min = 0, max = 1, typeName = "string") IPrimitiveType<String> theContext,
|
||||
@OperationParam(name = "contextDirection", min = 0, max = 1, typeName = "string") IPrimitiveType<String> theContextDirection,
|
||||
@OperationParam(name = "offset", min = 0, max = 1, typeName = "integer") IPrimitiveType<Integer> theOffset,
|
||||
@OperationParam(name = "count", min = 0, max = 1, typeName = "integer") IPrimitiveType<Integer> theCount,
|
||||
@OperationParam(name = JpaConstants.OPERATION_EXPAND_PARAM_INCLUDE_HIERARCHY, min = 0, max = 1, typeName = "boolean") IPrimitiveType<Boolean> theIncludeHierarchy,
|
||||
|
@ -99,6 +105,22 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
|
|||
boolean haveIdentifier = theUrl != null && isNotBlank(theUrl.getValue());
|
||||
boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty();
|
||||
boolean haveValueSetVersion = theValueSetVersion != null && !theValueSetVersion.isEmpty();
|
||||
boolean haveContextDirection = theContextDirection != null && !theContextDirection.isEmpty();
|
||||
boolean haveContext = theContext != null && !theContext.isEmpty();
|
||||
|
||||
boolean isAutocompleteExtension = haveContext && haveContextDirection && "existing".equals(theContextDirection.getValue());
|
||||
|
||||
if (isAutocompleteExtension) {
|
||||
// this is a funky extension for NIH. Do our own thing and return.
|
||||
ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions(myDaoConfig, theContext, theFilter, theCount, theId, theUrl, theValueSet);
|
||||
startRequest(theServletRequest);
|
||||
try {
|
||||
|
||||
return myFulltextSearch.tokenAutocompleteValueSetSearch(options);
|
||||
} finally {
|
||||
endRequest(theServletRequest);
|
||||
}
|
||||
}
|
||||
|
||||
if (!haveId && !haveIdentifier && !haveValueSet) {
|
||||
throw new InvalidRequestException(Msg.code(1133) + "$expand operation at the type level (no ID specified) requires a url or a valueSet as a part of the request.");
|
||||
|
@ -265,3 +287,4 @@ public class ValueSetOperationProvider extends BaseJpaProvider {
|
|||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -0,0 +1,127 @@
|
|||
package ca.uhn.fhir.jpa.search.autocomplete;
|
||||
|
||||
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneClauseBuilder;
|
||||
import com.google.gson.Gson;
|
||||
import com.google.gson.JsonArray;
|
||||
import com.google.gson.JsonElement;
|
||||
import com.google.gson.JsonObject;
|
||||
import com.jayway.jsonpath.Configuration;
|
||||
import com.jayway.jsonpath.DocumentContext;
|
||||
import com.jayway.jsonpath.JsonPath;
|
||||
import com.jayway.jsonpath.ParseContext;
|
||||
import com.jayway.jsonpath.spi.json.GsonJsonProvider;
|
||||
import com.jayway.jsonpath.spi.mapper.GsonMappingProvider;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.stream.Collectors;
|
||||
import java.util.stream.StreamSupport;
|
||||
|
||||
/**
|
||||
* Compose the autocomplete aggregation, and parse the results.
|
||||
*/
|
||||
class TokenAutocompleteAggregation {
|
||||
static final String NESTED_AGG_NAME = "nestedTopNAgg";
|
||||
/**
|
||||
* Aggregation template json.
|
||||
*
|
||||
* https://www.elastic.co/guide/en/elasticsearch/reference/current/search-aggregations.html
|
||||
*/
|
||||
static final JsonObject AGGREGATION_TEMPLATE =
|
||||
new Gson().fromJson("{\n" +
|
||||
" \"terms\": {\n" +
|
||||
" \"field\": \"sp.TEMPLATE_DUMMY.token.code-system\",\n" +
|
||||
" \"size\": 30,\n" +
|
||||
" \"min_doc_count\": 1\n" +
|
||||
" },\n" +
|
||||
" \"aggs\": {\n" +
|
||||
" \"" + NESTED_AGG_NAME + "\": {\n" +
|
||||
" \"top_hits\": {\n" +
|
||||
" \"_source\": {\n" +
|
||||
" \"includes\": [ \"sp.TEMPLATE_DUMMY\" ]\n" +
|
||||
" },\n" +
|
||||
" \"size\": 1\n" +
|
||||
" }\n" +
|
||||
" }\n" +
|
||||
" }}", JsonObject.class);
|
||||
|
||||
static final Configuration configuration = Configuration
|
||||
.builder()
|
||||
.mappingProvider(new GsonMappingProvider())
|
||||
.jsonProvider(new GsonJsonProvider())
|
||||
.build();
|
||||
static final ParseContext parseContext = JsonPath.using(configuration);
|
||||
|
||||
private final String mySpName;
|
||||
private final int myCount;
|
||||
|
||||
public TokenAutocompleteAggregation(String theSpName, int theCount) {
|
||||
Validate.notEmpty(theSpName);
|
||||
Validate.isTrue(theCount>0, "count must be positive");
|
||||
mySpName = theSpName;
|
||||
myCount = theCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate the JSON for the ES aggregation query.
|
||||
*
|
||||
* @return the JSON
|
||||
*/
|
||||
JsonObject toJsonAggregation() {
|
||||
// clone and modify the template with the actual field names.
|
||||
JsonObject result = AGGREGATION_TEMPLATE.deepCopy();
|
||||
DocumentContext documentContext = parseContext.parse(result);
|
||||
documentContext.set("terms.field", ExtendedLuceneClauseBuilder.getTokenSystemCodeFieldPath(mySpName));
|
||||
documentContext.set("terms.size", myCount);
|
||||
documentContext.set("aggs." + NESTED_AGG_NAME + ".top_hits._source.includes[0]","sp." + mySpName);
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract hits from the aggregation buckets
|
||||
*
|
||||
* @param theAggregationResult the ES aggregation JSON
|
||||
* @return A list of TokenAutocompleteHit, one per aggregation bucket.
|
||||
*/
|
||||
@Nonnull
|
||||
List<TokenAutocompleteHit> extractResults(@Nonnull JsonObject theAggregationResult) {
|
||||
Validate.notNull(theAggregationResult);
|
||||
|
||||
JsonArray buckets = theAggregationResult.getAsJsonArray("buckets");
|
||||
List<TokenAutocompleteHit> result = StreamSupport.stream(buckets.spliterator(), false)
|
||||
.map(b-> bucketToEntry((JsonObject) b))
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract the result from the top-n aggregation bucket.
|
||||
* The inner bucket contains matching hits
|
||||
*/
|
||||
@Nonnull
|
||||
TokenAutocompleteHit bucketToEntry(JsonObject theBucketJson) {
|
||||
// wrap the JsonObject for JSONPath.
|
||||
DocumentContext documentContext = parseContext.parse(theBucketJson);
|
||||
|
||||
// The outer bucket is keyed by the token value (i.e. "system|code").
|
||||
String bucketKey = documentContext.read("key", String.class);
|
||||
|
||||
// The inner bucket has a hits array, and we only need the first.
|
||||
JsonObject spRootNode = documentContext.read(NESTED_AGG_NAME + ".hits.hits[0]._source.sp");
|
||||
// MB - JsonPath doesn't have placeholders, and I don't want to screw-up quoting mySpName, so read the JsonObject explicitly
|
||||
JsonObject spNode = spRootNode.getAsJsonObject(mySpName);
|
||||
JsonElement exactNode = spNode.get("string").getAsJsonObject().get("exact");
|
||||
String displayText;
|
||||
if (exactNode.isJsonArray()) {
|
||||
displayText = exactNode.getAsJsonArray().get(0).getAsString();
|
||||
} else {
|
||||
displayText = exactNode.getAsString();
|
||||
}
|
||||
|
||||
return new TokenAutocompleteHit(bucketKey,displayText);
|
||||
}
|
||||
|
||||
}
|
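For illustration only (not part of this commit): given the template and the JSONPath rewrites above, the aggregation JSON produced by toJsonAggregation() for a hypothetical search parameter "code" and a count of 10 has roughly the following shape. The class is package-private, so this is an in-package sketch with imports omitted.

// Sketch only; field paths follow AGGREGATION_TEMPLATE and getTokenSystemCodeFieldPath above.
JsonObject agg = new TokenAutocompleteAggregation("code", 10).toJsonAggregation();
// agg is approximately:
// {
//   "terms": { "field": "sp.code.token.code-system", "size": 10, "min_doc_count": 1 },
//   "aggs": {
//     "nestedTopNAgg": {
//       "top_hits": { "_source": { "includes": [ "sp.code" ] }, "size": 1 }
//     }
//   }
// }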
|
@ -0,0 +1,29 @@
|
|||
package ca.uhn.fhir.jpa.search.autocomplete;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.util.TerserUtil;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
|
||||
/**
|
||||
* A single autocomplete search hit.
|
||||
*/
|
||||
class TokenAutocompleteHit {
|
||||
@Nonnull
|
||||
final String mySystemCode;
|
||||
final String myDisplayText;
|
||||
|
||||
TokenAutocompleteHit(@Nonnull String theSystemCode, String theDisplayText) {
|
||||
Validate.notEmpty(theSystemCode);
|
||||
mySystemCode = theSystemCode;
|
||||
myDisplayText = theDisplayText;
|
||||
}
|
||||
|
||||
@Nonnull
|
||||
public String getSystemCode() {
|
||||
return mySystemCode;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,116 @@
|
|||
package ca.uhn.fhir.jpa.search.autocomplete;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.dao.search.ExtendedLuceneClauseBuilder;
|
||||
import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
||||
import ca.uhn.fhir.model.api.IQueryParameterType;
|
||||
import ca.uhn.fhir.rest.param.StringParam;
|
||||
import com.google.gson.JsonObject;
|
||||
import org.apache.commons.lang3.StringUtils;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension;
|
||||
import org.hibernate.search.engine.search.aggregation.AggregationKey;
|
||||
import org.hibernate.search.engine.search.aggregation.SearchAggregation;
|
||||
import org.hibernate.search.engine.search.query.SearchResult;
|
||||
import org.hibernate.search.engine.search.query.dsl.SearchQueryOptionsStep;
|
||||
import org.hibernate.search.mapper.orm.search.loading.dsl.SearchLoadingOptionsStep;
|
||||
import org.hibernate.search.mapper.orm.session.SearchSession;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
/**
|
||||
* Use aggregations to implement a search for the most frequent token search parameter values.
|
||||
*/
|
||||
class TokenAutocompleteSearch {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(TokenAutocompleteSearch.class);
|
||||
private static final AggregationKey<JsonObject> AGGREGATION_KEY = AggregationKey.of("autocomplete");
|
||||
|
||||
private final FhirContext myFhirContext;
|
||||
private final SearchSession mySession;
|
||||
|
||||
public TokenAutocompleteSearch(FhirContext theFhirContext, SearchSession theSession) {
|
||||
myFhirContext = theFhirContext;
|
||||
mySession = theSession;
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Search for tokens indexed by theSPName on theResourceType matching theSearchText.
|
||||
* @param theResourceType The resource type (e.g. Observation)
|
||||
* @param theSPName The search param code (e.g. combo-code)
|
||||
* @param theSearchText The search text (e.g. "bloo")
|
||||
* @return A collection of Coding elements
|
||||
*/
|
||||
@Nonnull
|
||||
public List<TokenAutocompleteHit> search(String theResourceType, String theSPName, String theSearchText, String theSearchModifier, int theCount) {
|
||||
|
||||
TokenAutocompleteAggregation tokenAutocompleteAggregation = new TokenAutocompleteAggregation(theSPName, theCount);
|
||||
|
||||
if (theSearchText.equals(StringUtils.stripEnd(theSearchText,null))) {
|
||||
// no trailing whitespace. Add a wildcard to act like match_bool_prefix
|
||||
// https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-bool-prefix-query.html
|
||||
theSearchText = theSearchText + "*";
|
||||
}
|
||||
String queryText = theSearchText;
|
||||
|
||||
// compose the query json
|
||||
SearchQueryOptionsStep<?, ?, SearchLoadingOptionsStep, ?, ?> query = mySession.search(ResourceTable.class)
|
||||
.where(
|
||||
f -> f.bool(b -> {
|
||||
ExtendedLuceneClauseBuilder clauseBuilder = new ExtendedLuceneClauseBuilder(myFhirContext, b, f);
|
||||
|
||||
if (isNotBlank(theResourceType)) {
|
||||
b.must(f.match().field("myResourceType").matching(theResourceType));
|
||||
}
|
||||
|
||||
switch(theSearchModifier) {
|
||||
case "text":
|
||||
StringParam stringParam = new StringParam(queryText);
|
||||
List<List<IQueryParameterType>> andOrTerms = Collections.singletonList(Collections.singletonList(stringParam));
|
||||
clauseBuilder.addStringTextSearch(theSPName, andOrTerms);
|
||||
break;
|
||||
case "":
|
||||
default:
|
||||
throw new IllegalArgumentException(Msg.code(2023) + "Autocomplete only accepts text search for now.");
|
||||
|
||||
}
|
||||
|
||||
|
||||
}))
|
||||
.aggregation(AGGREGATION_KEY, buildESAggregation(tokenAutocompleteAggregation));
|
||||
|
||||
// run the query, but with 0 results. We only care about the aggregations.
|
||||
SearchResult<?> result = query.fetch(0);
|
||||
|
||||
// extract the top-n results from the aggregation json.
|
||||
JsonObject resultAgg = result.aggregation(AGGREGATION_KEY);
|
||||
List<TokenAutocompleteHit> aggEntries = tokenAutocompleteAggregation.extractResults(resultAgg);
|
||||
|
||||
return aggEntries;
|
||||
}
|
||||
|
||||
/**
|
||||
* Hibernate-search doesn't support nested aggregations, so we use an extension to build what we need from raw JSON.
|
||||
*/
|
||||
SearchAggregation<JsonObject> buildESAggregation(TokenAutocompleteAggregation tokenAutocompleteAggregation) {
|
||||
JsonObject jsonAggregation = tokenAutocompleteAggregation.toJsonAggregation();
|
||||
|
||||
SearchAggregation<JsonObject> aggregation = mySession
|
||||
.scope( ResourceTable.class )
|
||||
.aggregation()
|
||||
.extension(ElasticsearchExtension.get())
|
||||
.fromJson(jsonAggregation)
|
||||
.toAggregation();
|
||||
|
||||
return aggregation;
|
||||
}
|
||||
}
|
|
@ -0,0 +1,92 @@
|
|||
package ca.uhn.fhir.jpa.search.autocomplete;
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
|
||||
import java.util.Optional;
|
||||
|
||||
import static org.apache.commons.lang3.StringUtils.isNotBlank;
|
||||
|
||||
public class ValueSetAutocompleteOptions {
|
||||
|
||||
private String myResourceType;
|
||||
private String mySearchParamCode;
|
||||
private String mySearchParamModifier;
|
||||
private String myFilter;
|
||||
private Integer myCount;
|
||||
|
||||
public static ValueSetAutocompleteOptions validateAndParseOptions(
|
||||
DaoConfig theDaoConfig,
|
||||
IPrimitiveType<String> theContext,
|
||||
IPrimitiveType<String> theFilter,
|
||||
IPrimitiveType<Integer> theCount,
|
||||
IIdType theId,
|
||||
IPrimitiveType<String> theUrl,
|
||||
IBaseResource theValueSet)
|
||||
{
|
||||
boolean haveId = theId != null && theId.hasIdPart();
|
||||
boolean haveIdentifier = theUrl != null && isNotBlank(theUrl.getValue());
|
||||
boolean haveValueSet = theValueSet != null && !theValueSet.isEmpty();
|
||||
if (haveId || haveIdentifier || haveValueSet) {
|
||||
throw new InvalidRequestException(Msg.code(2020) + "$expand with contextDirection='existing' is only supported at the type level. It is not supported at instance level, with a url specified, or with a ValueSet.");
|
||||
}
|
||||
if (!theDaoConfig.isAdvancedLuceneIndexing()) {
|
||||
throw new InvalidRequestException(Msg.code(2022) + "$expand with contextDirection='existing' requires Extended Lucene Indexing.");
|
||||
}
|
||||
ValueSetAutocompleteOptions result = new ValueSetAutocompleteOptions();
|
||||
|
||||
result.parseContext(theContext);
|
||||
result.myFilter =
|
||||
theFilter == null ? null : theFilter.getValue();
|
||||
result.myCount = IPrimitiveType.toValueOrNull(theCount);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
private void parseContext(IPrimitiveType<String> theContextWrapper) {
|
||||
if (theContextWrapper == null || theContextWrapper.isEmpty()) {
|
||||
throw new InvalidRequestException(Msg.code(2021) + "$expand with contextDirection='existing' requires a context");
|
||||
}
|
||||
String theContext = theContextWrapper.getValue();
|
||||
int separatorIdx = theContext.indexOf('.');
|
||||
String codeWithPossibleModifier;
|
||||
if (separatorIdx >= 0) {
|
||||
myResourceType = theContext.substring(0, separatorIdx);
|
||||
codeWithPossibleModifier = theContext.substring(separatorIdx + 1);
|
||||
} else {
|
||||
codeWithPossibleModifier = theContext;
|
||||
}
|
||||
int modifierIdx = codeWithPossibleModifier.indexOf(':');
|
||||
if (modifierIdx >= 0) {
|
||||
mySearchParamCode = codeWithPossibleModifier.substring(0, modifierIdx);
|
||||
mySearchParamModifier = codeWithPossibleModifier.substring(modifierIdx + 1);
|
||||
} else {
|
||||
mySearchParamCode = codeWithPossibleModifier;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public String getResourceType() {
|
||||
return myResourceType;
|
||||
}
|
||||
|
||||
public String getSearchParamCode() {
|
||||
return mySearchParamCode;
|
||||
}
|
||||
|
||||
public String getSearchParamModifier() {
|
||||
return mySearchParamModifier;
|
||||
}
|
||||
|
||||
public String getFilter() {
|
||||
return myFilter;
|
||||
}
|
||||
|
||||
public Optional<Integer> getCount() {
|
||||
return Optional.ofNullable(myCount);
|
||||
}
|
||||
}
|
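For illustration only (not part of this commit): a small sketch of how validateAndParseOptions splits a context expression. The values are assumed, the DSTU2 StringDt primitive is used as an IPrimitiveType&lt;String&gt; (as the commit's own tests do), and imports/class scaffolding are omitted.

// Assumed values; requires Extended Lucene Indexing to be enabled on the DaoConfig.
DaoConfig config = new DaoConfig();
config.setAdvancedLuceneIndexing(true);

ValueSetAutocompleteOptions options = ValueSetAutocompleteOptions.validateAndParseOptions(
	config,
	new StringDt("Observation.code:text"), // context
	new StringDt("press"),                 // filter
	null,                                  // count
	null,                                  // id
	null,                                  // url
	null);                                 // valueSet

// options.getResourceType()        -> "Observation"
// options.getSearchParamCode()     -> "code"
// options.getSearchParamModifier() -> "text"
// options.getFilter()              -> "press"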
|
@ -0,0 +1,55 @@
|
|||
package ca.uhn.fhir.jpa.search.autocomplete;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.util.TerserUtil;
|
||||
import org.hibernate.search.mapper.orm.session.SearchSession;
|
||||
import org.hl7.fhir.instance.model.api.IBaseBackboneElement;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
/**
|
||||
* Adapt the autocomplete result into a ValueSet suitable for our $expand extension.
|
||||
*/
|
||||
public class ValueSetAutocompleteSearch {
|
||||
private final FhirContext myFhirContext;
|
||||
private final TokenAutocompleteSearch myAutocompleteSearch;
|
||||
static final int DEFAULT_SIZE = 30;
|
||||
|
||||
public ValueSetAutocompleteSearch(FhirContext theFhirContext, SearchSession theSession) {
|
||||
myFhirContext = theFhirContext;
|
||||
myAutocompleteSearch = new TokenAutocompleteSearch(myFhirContext, theSession);
|
||||
}
|
||||
|
||||
public IBaseResource search(ValueSetAutocompleteOptions theOptions) {
|
||||
List<TokenAutocompleteHit> aggEntries = myAutocompleteSearch.search(theOptions.getResourceType(), theOptions.getSearchParamCode(), theOptions.getFilter(), theOptions.getSearchParamModifier(), (int) theOptions.getCount().orElse(DEFAULT_SIZE));
|
||||
|
||||
ValueSet result = new ValueSet();
|
||||
ValueSet.ValueSetExpansionComponent expansion = new ValueSet.ValueSetExpansionComponent();
|
||||
result.setExpansion(expansion);
|
||||
aggEntries.stream()
|
||||
.map(this::makeCoding)
|
||||
.forEach(expansion::addContains);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
ValueSet.ValueSetExpansionContainsComponent makeCoding(TokenAutocompleteHit theSearchHit) {
|
||||
TokenParam tokenParam = new TokenParam();
|
||||
tokenParam.setValueAsQueryToken(myFhirContext, null, null, theSearchHit.mySystemCode);
|
||||
|
||||
// R4 only for now.
|
||||
// IBaseCoding coding = TerserUtil.newElement(myFhirContext, "Coding");
|
||||
ValueSet.ValueSetExpansionContainsComponent coding = new ValueSet.ValueSetExpansionContainsComponent();
|
||||
coding.setCode(tokenParam.getValue());
|
||||
coding.setSystem(tokenParam.getSystem());
|
||||
coding.setDisplay(theSearchHit.myDisplayText);
|
||||
|
||||
return coding;
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,17 @@
/**
* Search for distinct Coding elements by the display text, or by partial codes for a given SearchParameter.
* <p>
* Main entrypoint - {@link ca.uhn.fhir.jpa.search.autocomplete.TokenAutocompleteSearch#search}
*
* This work depends on the Hibernate Search infrastructure in {@link ca.uhn.fhir.jpa.dao.search}.
*
* NIH sponsored this work to provide an interactive-autocomplete when browsing codes in a research dataset.
*
* https://gitlab.com/simpatico.ai/cdr/-/issues/2452
* wipmb TODO-LIST
* wipmb - docs - no partition support
* wipmb - link to docs
* wipmb what if the sp isn't of type token? do we check, or discard results without tokens?
*
*/
package ca.uhn.fhir.jpa.search.autocomplete;
|
|
@ -39,7 +39,6 @@ import ca.uhn.fhir.rest.param.ReferenceParam;
|
|||
import ca.uhn.fhir.rest.param.TokenParam;
|
||||
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
|
||||
import com.fasterxml.jackson.core.JsonProcessingException;
|
||||
import com.fasterxml.jackson.databind.JsonMappingException;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.google.common.annotations.VisibleForTesting;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
|
@ -750,7 +749,6 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
|
|||
public List<IBaseResource> getObservationResources(Collection<ResourcePersistentId> thePids) {
|
||||
SearchRequest searchRequest = buildObservationResourceSearchRequest(thePids);
|
||||
try {
|
||||
// wipmb what is the limit to an ES hit count? 10k? We may need to chunk this :-(
|
||||
SearchResponse observationDocumentResponse = executeSearchRequest(searchRequest);
|
||||
SearchHit[] observationDocumentHits = observationDocumentResponse.getHits().getHits();
|
||||
IParser parser = TolerantJsonParser.createWithLenientErrorHandling(myContext, null);
|
||||
|
@ -759,16 +757,11 @@ public class ElasticsearchSvcImpl implements IElasticsearchSvc {
|
|||
* @see ca.uhn.fhir.jpa.dao.BaseHapiFhirDao#toResource(Class, IBaseResourceEntity, Collection, boolean) for
|
||||
* details about parsing raw json to BaseResource
|
||||
*/
|
||||
// wipmb what do we do with partition?
|
||||
// wipmb what do we do with deleted observation resources
|
||||
// wipmb how do you handle provenance?
|
||||
// Parse using tolerant parser
|
||||
return Arrays.stream(observationDocumentHits)
|
||||
.map(this::parseObservationJson)
|
||||
.map(observationJson -> parser.parseResource(resourceType, observationJson.getResource()))
|
||||
.collect(Collectors.toList());
|
||||
} catch (IOException theE) {
|
||||
// wipmb do we fallback to JPA search then?
|
||||
throw new InvalidRequestException(Msg.code(2003) + "Unable to execute observation document query for provided IDs " + thePids, theE);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,7 +2,6 @@ package ca.uhn.fhir.jpa.dao.dstu2;
|
|||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.jpa.config.TestHibernateSearchAddInConfig;
|
||||
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParamConstants;
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
|
@ -45,7 +44,6 @@ import org.junit.jupiter.api.Disabled;
|
|||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.TestPropertySource;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import java.math.BigDecimal;
|
||||
|
|
|
@ -61,6 +61,7 @@ import org.springframework.test.context.ContextConfiguration;
|
|||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
|
||||
import javax.persistence.EntityManager;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -85,6 +86,8 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
@Autowired
|
||||
protected PlatformTransactionManager myTxManager;
|
||||
@Autowired
|
||||
protected EntityManager myEntityManager;
|
||||
@Autowired
|
||||
protected ISearchParamPresenceSvc mySearchParamPresenceSvc;
|
||||
@Autowired
|
||||
protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
|
||||
|
@ -411,7 +414,6 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testStringSearch() {
|
||||
IIdType id1, id2, id3, id4, id5, id6;
|
||||
|
@ -494,6 +496,8 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
private void assertObservationSearchMatchesNothing(String message, SearchParameterMap map) {
|
||||
assertObservationSearchMatches(message,map);
|
||||
}
|
||||
|
|
|
@ -0,0 +1,108 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.config.TestHibernateSearchAddInConfig;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
|
||||
import org.apache.commons.io.IOUtils;
|
||||
import org.apache.http.client.methods.CloseableHttpResponse;
|
||||
import org.apache.http.client.methods.HttpGet;
|
||||
import org.hamcrest.Description;
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.TypeSafeDiagnosingMatcher;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.hl7.fhir.r4.model.ValueSet;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.*;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
@ExtendWith(SpringExtension.class)
|
||||
@RequiresDocker
|
||||
@ContextConfiguration(classes = TestHibernateSearchAddInConfig.Elasticsearch.class)
|
||||
public class ResourceProviderR4ElasticTest extends BaseResourceProviderR4Test {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(ResourceProviderR4ElasticTest.class);
|
||||
|
||||
@Autowired
|
||||
DaoConfig myDaoConfig;
|
||||
|
||||
@BeforeEach
|
||||
public void beforeEach() {
|
||||
myDaoConfig.setAdvancedLuceneIndexing(true);
|
||||
}
|
||||
|
||||
@AfterEach
|
||||
public void afterEach() {
|
||||
myDaoConfig.setAdvancedLuceneIndexing(new DaoConfig().isAdvancedLuceneIndexing());
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Test new contextDirection extension for NIH.
|
||||
*/
|
||||
@Test
|
||||
public void testAutocompleteDirectionExisting() throws IOException {
|
||||
// given
|
||||
Coding mean_blood_pressure = new Coding("http://loinc.org", "8478-0", "Mean blood pressure");
|
||||
Coding blood_count = new Coding("http://loinc.org", "789-8", "Erythrocytes [#/volume] in Blood by Automated count");
|
||||
createObservationWithCode(blood_count);
|
||||
createObservationWithCode(mean_blood_pressure);
|
||||
createObservationWithCode(mean_blood_pressure);
|
||||
createObservationWithCode(mean_blood_pressure);
|
||||
createObservationWithCode(mean_blood_pressure);
|
||||
|
||||
// when
|
||||
HttpGet expandQuery = new HttpGet(ourServerBase + "/ValueSet/$expand?contextDirection=existing&context=Observation.code:text&filter=pressure");
|
||||
try (CloseableHttpResponse response = ourHttpClient.execute(expandQuery)) {
|
||||
|
||||
// then
|
||||
assertEquals(Constants.STATUS_HTTP_200_OK, response.getStatusLine().getStatusCode());
|
||||
String text = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
ValueSet valueSet = myFhirCtx.newXmlParser().parseResource(ValueSet.class, text);
|
||||
ourLog.info("testAutocompleteDirectionExisting {}", text);
|
||||
assertThat(valueSet, is(not(nullValue())));
|
||||
List<ValueSet.ValueSetExpansionContainsComponent> expansions = valueSet.getExpansion().getContains();
|
||||
assertThat(expansions, hasItem(valueSetExpansionMatching(mean_blood_pressure)));
|
||||
assertThat(expansions, not(hasItem(valueSetExpansionMatching(blood_count))));
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public static Matcher<ValueSet.ValueSetExpansionContainsComponent> valueSetExpansionMatching(IBaseCoding theTarget) {
|
||||
return new TypeSafeDiagnosingMatcher<ValueSet.ValueSetExpansionContainsComponent>() {
|
||||
public void describeTo(Description description) {
|
||||
description.appendText("ValueSetExpansionContainsComponent matching ").appendValue(theTarget.getSystem() + "|" + theTarget.getCode());
|
||||
}
|
||||
|
||||
protected boolean matchesSafely(ValueSet.ValueSetExpansionContainsComponent theItem, Description mismatchDescription) {
|
||||
return Objects.equals(theItem.getSystem(), theTarget.getSystem()) &&
|
||||
Objects.equals(theItem.getCode(), theTarget.getCode());
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
private void createObservationWithCode(Coding c) {
|
||||
Observation observation = new Observation();
|
||||
observation.getCode().addCoding(c);
|
||||
myObservationDao.create(observation, mySrd).getId().toUnqualifiedVersionless();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,146 @@
|
|||
package ca.uhn.fhir.jpa.search.autocomplete;
|
||||
|
||||
import com.google.gson.Gson;
|
||||
import com.google.gson.JsonObject;
|
||||
import org.junit.jupiter.api.Nested;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
import static com.jayway.jsonpath.matchers.JsonPathMatchers.isJson;
|
||||
import static com.jayway.jsonpath.matchers.JsonPathMatchers.withJsonPath;
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.empty;
|
||||
import static org.hamcrest.Matchers.equalTo;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.hamcrest.Matchers.is;
|
||||
import static org.hamcrest.Matchers.not;
|
||||
|
||||
class TokenAutocompleteAggregationTest {
|
||||
|
||||
@Nested
|
||||
public class AggregationQueryContents {
|
||||
String myCode;
|
||||
int myCount = 30;
|
||||
String myAggJson;
|
||||
|
||||
@Test
|
||||
public void includesSPName() {
|
||||
myCode = "combo-code";
|
||||
|
||||
buildAggregation();
|
||||
assertThat("terms field is sp", myAggJson, isJson(withJsonPath("terms.field", equalTo("sp.combo-code.token.code-system"))));
|
||||
assertThat("fetched piece is sp", myAggJson, isJson(withJsonPath("aggs.nestedTopNAgg.top_hits._source.includes[0]", equalTo("sp.combo-code"))));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void includesCount() {
|
||||
myCode = "combo-code";
|
||||
myCount = 77;
|
||||
|
||||
buildAggregation();
|
||||
assertThat("terms field is sp", myAggJson, isJson(withJsonPath("terms.size", equalTo(77))));
|
||||
}
|
||||
|
||||
private void buildAggregation() {
|
||||
myAggJson = new TokenAutocompleteAggregation(myCode, myCount).toJsonAggregation().toString();
|
||||
}
|
||||
}
|
||||
|
||||
@Nested
|
||||
public class ResultExtraction {
|
||||
// Sample result from elastic for Observation.code
|
||||
String resultJson = "" +
|
||||
"{ \"doc_count_error_upper_bound\":0,\"sum_other_doc_count\":0," +
|
||||
" \"buckets\": [" +
|
||||
" { \"key\": \"http://loinc.org|88262-1\"," +
|
||||
" \"doc_count\":3," +
|
||||
" \"nestedTopNAgg\": " +
|
||||
" { \"hits\":" +
|
||||
" { \"total\":{\"value\":3,\"relation\":\"eq\"}, \"max_score\":1.0," +
|
||||
" \"hits\":[" +
|
||||
" { \"_index\":\"resourcetable-000001\",\"_type\":\"_doc\",\"_id\":\"13\",\"_score\":1.0," +
|
||||
" \"_source\":{\"sp\":{\"code\":" +
|
||||
" { \"string\":{\"exact\":\"Gram positive blood culture panel by Probe in Positive blood culture\",\"text\":\"Gram positive blood culture panel by Probe in Positive blood culture\",\"norm\":\"Gram positive blood culture panel by Probe in Positive blood culture\"}," +
|
||||
" \"token\":{\"code\":\"88262-1\",\"system\":\"http://loinc.org\",\"code-system\":\"http://loinc.org|88262-1\"}}}}}]}}}," +
|
||||
// a second result
|
||||
"{\"key\":\"http://loinc.org|4544-3\",\"doc_count\":1,\"nestedTopNAgg\":{\"hits\":{\"total\":{\"value\":1,\"relation\":\"eq\"},\"max_score\":1.0,\"hits\":[{\"_index\":\"resourcetable-000001\",\"_type\":\"_doc\",\"_id\":\"12\",\"_score\":1.0,\"_source\":{\"sp\":{\"code\":{\"string\":{\"exact\":\"Hematocrit [Volume Fraction] of Blood by Automated count\",\"text\":\"Hematocrit [Volume Fraction] of Blood by Automated count\",\"norm\":\"Hematocrit [Volume Fraction] of Blood by Automated count\"},\"token\":{\"code\":\"4544-3\",\"system\":\"http://loinc.org\",\"code-system\":\"http://loinc.org|4544-3\"}}}}}]}}}," +
|
||||
"{\"key\":\"http://loinc.org|4548-4\",\"doc_count\":1,\"nestedTopNAgg\":{\"hits\":{\"total\":{\"value\":1,\"relation\":\"eq\"},\"max_score\":1.0,\"hits\":[{\"_index\":\"resourcetable-000001\",\"_type\":\"_doc\",\"_id\":\"11\",\"_score\":1.0,\"_source\":{\"sp\":{\"code\":{\"string\":{\"exact\":\"Hemoglobin A1c/Hemoglobin.total in Blood\",\"text\":\"Hemoglobin A1c/Hemoglobin.total in Blood\",\"norm\":\"Hemoglobin A1c/Hemoglobin.total in Blood\"},\"token\":{\"code\":\"4548-4\",\"system\":\"http://loinc.org\",\"code-system\":\"http://loinc.org|4548-4\"}}}}}]}}}" +
|
||||
"]}";
|
||||
JsonObject parsedResult = new Gson().fromJson(resultJson, JsonObject.class);
|
||||
TokenAutocompleteAggregation myAutocompleteAggregation = new TokenAutocompleteAggregation("code", 22);
|
||||
|
||||
@Test
|
||||
public void testResultExtraction() {
|
||||
|
||||
List<TokenAutocompleteHit> hits = myAutocompleteAggregation.extractResults(parsedResult);
|
||||
|
||||
assertThat(hits, is(not(empty())));
|
||||
assertThat(hits, (hasSize(3)));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBucketExtraction() {
|
||||
JsonObject bucket = (JsonObject) parsedResult.getAsJsonArray("buckets").get(0);
|
||||
|
||||
TokenAutocompleteHit entry = myAutocompleteAggregation.bucketToEntry(bucket);
|
||||
assertThat(entry.mySystemCode, equalTo("http://loinc.org|88262-1"));
|
||||
assertThat(entry.myDisplayText, equalTo("Gram positive blood culture panel by Probe in Positive blood culture"));
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Until we move to nested, we may have multiple Coding in a code. This is broken.
|
||||
*/
|
||||
@Test
|
||||
public void testMultiValuedBucketExtraction() {
|
||||
JsonObject bucket = new Gson().fromJson("{" +
|
||||
" \"key\": \"http://loinc.org|2708-6\"," +
|
||||
" \"doc_count\": 14," +
|
||||
" \"nestedTopNAgg\": {" +
|
||||
" \"hits\": {" +
|
||||
" \"total\": {" +
|
||||
" \"value\": 14," +
|
||||
" \"relation\": \"eq\"" +
|
||||
" }," +
|
||||
" \"max_score\": 1.0000025," +
|
||||
" \"hits\": [" +
|
||||
" {" +
|
||||
" \"_index\": \"resourcetable-000001\"," +
|
||||
" \"_type\": \"_doc\"," +
|
||||
" \"_id\": \"1393284\"," +
|
||||
" \"_score\": 1.0000025," +
|
||||
" \"_source\": {" +
|
||||
" \"sp\": {" +
|
||||
" \"code\": {" +
|
||||
" \"string\": {" +
|
||||
" \"exact\": [" +
|
||||
" \"Oxygen saturation in Arterial blood by Pulse oximetry\"," +
|
||||
" \"Oxygen saturation in Arterial blood\"" +
|
||||
" ]" +
|
||||
" }," +
|
||||
" \"token\": {" +
|
||||
" \"code-system\": [" +
|
||||
" \"http://loinc.org|2708-6\"," +
|
||||
" \"http://loinc.org|59408-5\"" +
|
||||
" ]" +
|
||||
" }" +
|
||||
" }" +
|
||||
" }" +
|
||||
" }" +
|
||||
" }" +
|
||||
" ]" +
|
||||
" }" +
|
||||
" }" +
|
||||
"}", JsonObject.class);
|
||||
|
||||
TokenAutocompleteHit entry = myAutocompleteAggregation.bucketToEntry(bucket);
|
||||
assertThat(entry.mySystemCode, equalTo("http://loinc.org|2708-6"));
|
||||
assertThat(entry.myDisplayText, equalTo("Oxygen saturation in Arterial blood by Pulse oximetry"));
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -0,0 +1,154 @@
|
|||
package ca.uhn.fhir.jpa.search.autocomplete;
|
||||
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.jpa.api.config.DaoConfig;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDao;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirSystemDao;
|
||||
import ca.uhn.fhir.jpa.api.svc.ISearchCoordinatorSvc;
|
||||
import ca.uhn.fhir.jpa.bulk.export.api.IBulkDataExportSvc;
|
||||
import ca.uhn.fhir.jpa.config.TestHibernateSearchAddInConfig;
|
||||
import ca.uhn.fhir.jpa.config.TestR4Config;
|
||||
import ca.uhn.fhir.jpa.dao.BaseJpaTest;
|
||||
import ca.uhn.fhir.jpa.search.reindex.IResourceReindexingSvc;
|
||||
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
|
||||
import ca.uhn.fhir.rest.server.servlet.ServletRequestDetails;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
|
||||
import org.hamcrest.Description;
|
||||
import org.hamcrest.Matcher;
|
||||
import org.hamcrest.TypeSafeDiagnosingMatcher;
|
||||
import org.hibernate.search.mapper.orm.Search;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.Observation;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.test.context.ContextConfiguration;
|
||||
import org.springframework.test.context.junit.jupiter.SpringExtension;
|
||||
import org.springframework.transaction.PlatformTransactionManager;
|
||||
import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import javax.persistence.EntityManager;
import java.util.List;
import java.util.Objects;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.is;

@ExtendWith(SpringExtension.class)
@RequiresDocker
@ContextConfiguration(classes = {TestR4Config.class, TestHibernateSearchAddInConfig.Elasticsearch.class})
public class TokenAutocompleteElasticsearchIT extends BaseJpaTest {
	@Autowired
	protected PlatformTransactionManager myTxManager;
	protected ServletRequestDetails mySrd = new ServletRequestDetails();
	@Autowired
	@Qualifier("myObservationDaoR4")
	private IFhirResourceDao<Observation> myObservationDao;
	@Autowired
	private FhirContext myFhirCtx;
	@Autowired
	protected EntityManager myEntityManager;
	@Autowired
	protected DaoConfig myDaoConfig;
	@Autowired
	protected ISearchParamPresenceSvc mySearchParamPresenceSvc;
	@Autowired
	protected ISearchCoordinatorSvc mySearchCoordinatorSvc;
	@Autowired
	protected ISearchParamRegistry mySearchParamRegistry;
	@Autowired
	IFhirSystemDao<?,?> mySystemDao;
	@Autowired
	IResourceReindexingSvc myResourceReindexingSvc;
	@Autowired
	IBulkDataExportSvc myBulkDataExportSvc;

	@BeforeEach
	public void beforePurgeDatabase() {
		purgeDatabase(myDaoConfig, mySystemDao, myResourceReindexingSvc, mySearchCoordinatorSvc, mySearchParamRegistry, myBulkDataExportSvc);
		myDaoConfig.setAdvancedLuceneIndexing(true);
	}

	@Override
	protected FhirContext getContext() {
		return myFhirCtx;
	}

	@Override
	protected PlatformTransactionManager getTxManager() {
		return myTxManager;
	}

	@Test
	public void testAutocompleteByCodeDisplay() {

		// a few different codes
		Coding mean_blood_pressure = new Coding("http://loinc.org", "8478-0", "Mean blood pressure");

		createObservationWithCode(new Coding("http://loinc.org", "789-8", "Erythrocytes [#/volume] in Blood by Automated count"));
		createObservationWithCode(mean_blood_pressure);
		createObservationWithCode(new Coding("http://loinc.org", "788-0", "Erythrocyte distribution width [Ratio] by Automated count"));
		createObservationWithCode(new Coding("http://loinc.org", "787-2", "MCV [Entitic volume] by Automated count"));
		createObservationWithCode(new Coding("http://loinc.org", "786-4", "MCHC [Mass/volume] by Automated count"));
		createObservationWithCode(new Coding("http://loinc.org", "785-6", "MCH [Entitic mass] by Automated count"));

		createObservationWithCode(new Coding("http://loinc.org", "777-3", "Platelets [#/volume] in Blood by Automated count"));
		createObservationWithCode(new Coding("http://loinc.org", "718-7", "Hemoglobin [Mass/volume] in Blood"));
		createObservationWithCode(new Coding("http://loinc.org", "6690-2", "Leukocytes [#/volume] in Blood by Automated count"));
		createObservationWithCode(new Coding("http://loinc.org", "59032-3", "Lactate [Mass/volume] in Blood"));
		createObservationWithCode(new Coding("http://loinc.org", "4548-4", "Hemoglobin A1c/Hemoglobin.total in Blood"));
		createObservationWithCode(new Coding("http://loinc.org", "4544-3", "Hematocrit [Volume Fraction] of Blood by Automated count"));

		// some repeats to make sure we only return singles
		createObservationWithCode(new Coding("http://loinc.org", "88262-1", "Gram positive blood culture panel by Probe in Positive blood culture"));
		createObservationWithCode(new Coding("http://loinc.org", "88262-1", "Gram positive blood culture panel by Probe in Positive blood culture"));
		createObservationWithCode(new Coding("http://loinc.org", "88262-1", "Gram positive blood culture panel by Probe in Positive blood culture"));

		List<TokenAutocompleteHit> codes;
		codes = autocompleteSearch("Observation", "code", "blo");
		assertThat("finds blood pressure", codes, hasItem(matchingSystemAndCode(mean_blood_pressure)));

		codes = autocompleteSearch("Observation", "code", "pressure");
		assertThat("finds blood pressure", codes, hasItem(matchingSystemAndCode(mean_blood_pressure)));

		codes = autocompleteSearch("Observation", "code", "nuclear");
		assertThat("doesn't find nuclear", codes, is(empty()));
	}

	List<TokenAutocompleteHit> autocompleteSearch(String theResourceType, String theSPName, String theSearchText) {
		return new TransactionTemplate(myTxManager).execute(s -> {
			TokenAutocompleteSearch tokenAutocompleteSearch = new TokenAutocompleteSearch(myFhirCtx, Search.session(myEntityManager));
			return tokenAutocompleteSearch.search(theResourceType, theSPName, theSearchText, "text", 30);
		});
	}

	private IIdType createObservationWithCode(Coding c) {
		Observation obs1 = new Observation();
		obs1.getCode().addCoding(c);
		return myObservationDao.create(obs1, mySrd).getId().toUnqualifiedVersionless();
	}

	@Nonnull
	private Matcher<TokenAutocompleteHit> matchingSystemAndCode(Coding theCoding) {
		return new TypeSafeDiagnosingMatcher<TokenAutocompleteHit>() {
			private final String mySystemAndCode = theCoding.getSystem() + "|" + theCoding.getCode();

			@Override
			protected boolean matchesSafely(TokenAutocompleteHit item, Description mismatchDescription) {
				return Objects.equals(mySystemAndCode, item.getSystemCode());
			}

			@Override
			public void describeTo(Description description) {
				description.appendText("search hit matching ").appendValue(mySystemAndCode);
			}
		};
	}

}
@@ -0,0 +1,183 @@
package ca.uhn.fhir.jpa.search.autocomplete;

import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.api.config.DaoConfig;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.model.primitive.IntegerDt;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

import java.util.Optional;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.startsWith;
import static org.junit.jupiter.api.Assertions.assertThrows;

@DisplayName("ValueSetAutocompleteOptions validation and parsing")
class ValueSetAutocompleteOptionsTest {
	static final int ERROR_AUTOCOMPLETE_ONLY_TYPE_LEVEL = 2020;
	static final int ERROR_AUTOCOMPLETE_REQUIRES_CONTEXT = 2021;
	static final int ERROR_REQUIRES_EXTENDED_INDEXING = 2022;

	private IPrimitiveType<String> myContext;
	private IPrimitiveType<String> myFilter;
	private IPrimitiveType<Integer> myCount;
	private IIdType myId;
	private IPrimitiveType<String> myUrl;
	private ValueSet myValueSet;
	private ValueSetAutocompleteOptions myOptionsResult;
	private DaoConfig myDaoConfig = new DaoConfig();

	{
		myDaoConfig.setAdvancedLuceneIndexing(true);
	}

	@Test
	public void validWithBroadSPReference() {
		myContext = new StringDt("code");

		parseOptions();

		assertThat(myOptionsResult, is(not(nullValue())));
		assertThat(myOptionsResult.getResourceType(), is(nullValue()));
		assertThat(myOptionsResult.getSearchParamCode(), equalTo("code"));
	}

	@Test
	public void validWithPlainSPReference() {
		myContext = new StringDt("Observation.code");

		parseOptions();

		assertThat(myOptionsResult, is(not(nullValue())));
		assertThat(myOptionsResult.getResourceType(), equalTo("Observation"));
		assertThat(myOptionsResult.getSearchParamCode(), equalTo("code"));
		assertThat(myOptionsResult.getSearchParamModifier(), is(nullValue()));
	}

	@Test
	public void validWithTextModifier() {
		myContext = new StringDt("Observation.code:text");

		parseOptions();

		assertThat(myOptionsResult, is(not(nullValue())));
		assertThat(myOptionsResult.getResourceType(), equalTo("Observation"));
		assertThat(myOptionsResult.getSearchParamCode(), equalTo("code"));
		assertThat(myOptionsResult.getSearchParamModifier(), equalTo("text"));
	}

	@Test
	public void validContextWithFilter() {
		myContext = new StringDt("Observation.code:text");
		myFilter = new StringDt("blood");

		parseOptions();

		assertThat(myOptionsResult, is(not(nullValue())));
		assertThat(myOptionsResult.getFilter(), equalTo("blood"));
	}

	@Test
	public void emptyFilterOK() {
		myContext = new StringDt("Observation.code:text");
		myFilter = new StringDt("");

		parseOptions();

		assertThat(myOptionsResult, is(not(nullValue())));
		assertThat(myOptionsResult.getFilter(), equalTo(""));
	}

	@Test
	public void defaultCountAndOffsetAreEmpty() {
		myContext = new StringDt("Observation.code:text");

		parseOptions();

		assertThat(myOptionsResult.getCount(), is(equalTo(Optional.empty())));
	}

	@Test
	public void parsesCount() {
		myContext = new StringDt("Observation.code:text");
		myCount = new IntegerDt(50);

		parseOptions();

		assertThat(myOptionsResult, is(not(nullValue())));
		assertThat(myOptionsResult.getCount(), equalTo(Optional.of(50)));
	}

	@Nested
	@DisplayName("is invalid")
	public class InvalidCases {
		@Test
		public void withId() {
			myId = new IdDt("123");

			assertParseThrowsInvalidRequestWithErrorCode(ERROR_AUTOCOMPLETE_ONLY_TYPE_LEVEL);
		}

		@Test
		public void withValueSetIdentifier() {
			myUrl = new StringDt("http://example.com");

			assertParseThrowsInvalidRequestWithErrorCode(ERROR_AUTOCOMPLETE_ONLY_TYPE_LEVEL);
		}

		@Test
		public void withValueSet() {
			myValueSet = new ValueSet();
			myValueSet.addIdentifier().setValue("anId");

			assertParseThrowsInvalidRequestWithErrorCode(ERROR_AUTOCOMPLETE_ONLY_TYPE_LEVEL);
		}

		@Test
		public void withoutContext() {
			myFilter = new StringDt("blood");

			assertParseThrowsInvalidRequestWithErrorCode(ERROR_AUTOCOMPLETE_REQUIRES_CONTEXT);
		}

		@Test
		public void withEmptyContext() {
			myFilter = new StringDt("blood");
			myContext = new StringDt("");

			assertParseThrowsInvalidRequestWithErrorCode(ERROR_AUTOCOMPLETE_REQUIRES_CONTEXT);
		}

		@Test
		public void whenAdvancedIndexingOff() {
			// given
			myDaoConfig.setAdvancedLuceneIndexing(false);

			assertParseThrowsInvalidRequestWithErrorCode(ERROR_REQUIRES_EXTENDED_INDEXING);
		}

		private void assertParseThrowsInvalidRequestWithErrorCode(int theErrorCode) {
			InvalidRequestException e = assertThrows(InvalidRequestException.class, ValueSetAutocompleteOptionsTest.this::parseOptions);
			assertThat(e.getMessage(), startsWith(Msg.code(theErrorCode)));
		}

	}

	void parseOptions() {
		myOptionsResult = ValueSetAutocompleteOptions.validateAndParseOptions(myDaoConfig, myContext, myFilter, myCount, myId, myUrl, myValueSet);
	}

}
@@ -0,0 +1,64 @@
package ca.uhn.fhir.jpa.search.autocomplete;

import ca.uhn.fhir.context.FhirContext;
import org.hl7.fhir.r4.model.ValueSet;
import org.junit.jupiter.api.Nested;
import org.junit.jupiter.api.Test;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;

class ValueSetAutocompleteSearchTest {
	FhirContext myFhirContext = FhirContext.forR4();
	ValueSetAutocompleteSearch myValueSetAutocompleteSearch = new ValueSetAutocompleteSearch(myFhirContext, null);

	@Nested
	public class HitToValueSetConversion {

		private ValueSet.ValueSetExpansionContainsComponent myCoding;

		@Test
		public void testCreateCoding() {
			TokenAutocompleteHit entry = new TokenAutocompleteHit("http://loinc.org|4544-3", "Hematocrit [Volume Fraction] of Blood by Automated count");

			makeCoding(entry);

			assertThat(myCoding, is(not(nullValue())));
			assertThat(myCoding.getSystem(), equalTo("http://loinc.org"));
			assertThat(myCoding.getCode(), equalTo("4544-3"));
			assertThat(myCoding.getDisplay(), equalTo("Hematocrit [Volume Fraction] of Blood by Automated count"));
		}

		@Test
		public void testCreateCodingNoSystem() {
			TokenAutocompleteHit entry = new TokenAutocompleteHit("|some_code", "Some text");

			makeCoding(entry);

			assertThat(myCoding, is(not(nullValue())));
			assertThat(myCoding.getSystem(), is(nullValue()));
			assertThat(myCoding.getCode(), equalTo("some_code"));
			assertThat(myCoding.getDisplay(), equalTo("Some text"));
		}

		@Test
		public void testCreateCodingNoDisplay() {
			TokenAutocompleteHit entry = new TokenAutocompleteHit("|some_code", null);

			makeCoding(entry);

			assertThat(myCoding, is(not(nullValue())));
			assertThat(myCoding.getSystem(), is(nullValue()));
			assertThat(myCoding.getCode(), equalTo("some_code"));
			assertThat(myCoding.getDisplay(), is(nullValue()));
		}

		private void makeCoding(TokenAutocompleteHit theEntry) {
			myCoding = myValueSetAutocompleteSearch.makeCoding(theEntry);
		}
	}
}
@@ -83,8 +83,8 @@ public class SearchParamTextPropertyBinder implements PropertyBinder, PropertyBr
.analyzer("normStringAnalyzer")
|
||||
.projectable(Projectable.NO);
|
||||
|
||||
// TODO JB: may have to add normalizer to support case insensitive searches depending on token flags
|
||||
StringIndexFieldTypeOptionsStep<?> keywordFieldType = indexFieldTypeFactory.asString()
|
||||
// TODO JB: may have to add normalizer to support case insensitive searches depending on token flags
|
||||
.projectable(Projectable.NO)
|
||||
.aggregable(Aggregable.YES);
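Note: marking the keyword field type `aggregable(Aggregable.YES)` is what lets the autocomplete query group hits by token code. As a rough, hedged illustration of why that matters (this is not the commit's actual implementation), a Hibernate Search terms aggregation over such an aggregable keyword field could look like the sketch below. The field names "sp.code.token.text" and "sp.code.token.code-system" are placeholders standing in for whatever names the binder above registers, and ResourceTable is assumed here to be the indexed entity.

// Illustrative sketch only; field names are placeholders, not the binder's real index names.
import org.hibernate.search.engine.search.aggregation.AggregationKey;
import org.hibernate.search.engine.search.query.SearchResult;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.session.SearchSession;

import ca.uhn.fhir.jpa.model.entity.ResourceTable;

import javax.persistence.EntityManager;
import java.util.Map;

class TokenAggregationSketch {
	Map<String, Long> mostFrequentCodes(EntityManager theEntityManager, String theFilterText) {
		SearchSession session = Search.session(theEntityManager);
		AggregationKey<Map<String, Long>> byCode = AggregationKey.of("by-code");

		SearchResult<ResourceTable> result = session.search(ResourceTable.class)
			// match resources whose indexed token display text contains the filter word
			.where(f -> f.match().field("sp.code.token.text").matching(theFilterText))
			// terms aggregation over the aggregable keyword field holding "system|code"
			.aggregation(byCode, f -> f.terms()
				.field("sp.code.token.code-system", String.class)
				.maxTermCount(30))
			// only the aggregation buckets are needed, not the hits themselves
			.fetch(0);

		// bucket value -> document count, most frequent terms first by default
		return result.aggregation(byCode);
	}
}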
pom.xml
@@ -911,6 +911,16 @@
				<artifactId>jackson-module-jaxb-annotations</artifactId>
				<version>${jackson_version}</version>
			</dependency>
			<dependency>
				<groupId>com.jayway.jsonpath</groupId>
				<artifactId>json-path</artifactId>
				<version>2.5.0</version>
			</dependency>
			<dependency>
				<groupId>com.jayway.jsonpath</groupId>
				<artifactId>json-path-assert</artifactId>
				<version>2.5.0</version>
			</dependency>
			<dependency>
				<groupId>com.github.ben-manes.caffeine</groupId>
				<artifactId>caffeine</artifactId>
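As a closing illustration, separate from the diff itself, this is roughly how a client might drive the new autocomplete mode end-to-end through the type-level ValueSet $expand operation using the HAPI generic client. The base URL is hypothetical, and the target server is assumed to have advanced/extended Lucene indexing enabled; instance-level invocations and url/valueSet parameters are rejected, as the options tests above show.

// Illustrative client-side sketch; the base URL is a placeholder.
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.client.api.IGenericClient;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.ValueSet;

public class AutocompleteClientSketch {
	public static void main(String[] args) {
		FhirContext ctx = FhirContext.forR4();
		IGenericClient client = ctx.newRestfulGenericClient("http://localhost:8080/fhir");

		// Parameters for the autocomplete flavour of $expand
		Parameters inParams = new Parameters();
		inParams.addParameter().setName("contextDirection").setValue(new CodeType("existing"));
		inParams.addParameter().setName("context").setValue(new StringType("Observation.code:text"));
		inParams.addParameter().setName("filter").setValue(new StringType("press"));

		// Type-level invocation of $expand, returning a ValueSet
		ValueSet expansion = client.operation()
			.onType(ValueSet.class)
			.named("$expand")
			.withParameters(inParams)
			.returnResourceType(ValueSet.class)
			.execute();

		// The expansion holds the most frequent codings matching the filter
		expansion.getExpansion().getContains().forEach(c ->
			System.out.println(c.getSystem() + "|" + c.getCode() + " - " + c.getDisplay()));
	}
}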