Merge branch 'hapifhir:master' into fix_for_5.0.0-snapshot1
Commit 644b885ebe

@ -29,6 +29,7 @@ import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
@ -806,21 +807,43 @@ public interface IValidationSupport {
class TranslateCodeRequest {
private final String mySourceSystemUrl;
private final String mySourceCode;
private List<IBaseCoding> myCodings;
private final String myTargetSystemUrl;
private final int myHashCode;
private final String myConceptMapUrl;
private final String myConceptMapVersion;
private final String mySourceValueSetUrl;
private final String myTargetValueSetUrl;
private final Long myResourcePid;
private final boolean myReverse;
public TranslateCodeRequest(String theSourceSystemUrl, String theSourceCode, String theTargetSystemUrl) {
mySourceSystemUrl = theSourceSystemUrl;
mySourceCode = theSourceCode;
public TranslateCodeRequest(List<IBaseCoding> theCodings, String theTargetSystemUrl) {
myCodings = theCodings;
myTargetSystemUrl = theTargetSystemUrl;
myConceptMapUrl = null;
myConceptMapVersion = null;
mySourceValueSetUrl = null;
myTargetValueSetUrl = null;
myResourcePid = null;
myReverse = false;
}
myHashCode = new HashCodeBuilder(17, 37)
.append(mySourceSystemUrl)
.append(mySourceCode)
.append(myTargetSystemUrl)
.toHashCode();
public TranslateCodeRequest(
List<IBaseCoding> theCodings,
String theTargetSystemUrl,
String theConceptMapUrl,
String theConceptMapVersion,
String theSourceValueSetUrl,
String theTargetValueSetUrl,
Long theResourcePid,
boolean theReverse) {
myCodings = theCodings;
myTargetSystemUrl = theTargetSystemUrl;
myConceptMapUrl = theConceptMapUrl;
myConceptMapVersion = theConceptMapVersion;
mySourceValueSetUrl = theSourceValueSetUrl;
myTargetValueSetUrl = theTargetValueSetUrl;
myResourcePid = theResourcePid;
myReverse = theReverse;
}
@Override

@ -836,28 +859,62 @@ public interface IValidationSupport {
TranslateCodeRequest that = (TranslateCodeRequest) theO;
return new EqualsBuilder()
.append(mySourceSystemUrl, that.mySourceSystemUrl)
.append(mySourceCode, that.mySourceCode)
.append(myCodings, that.myCodings)
.append(myTargetSystemUrl, that.myTargetSystemUrl)
.append(myConceptMapUrl, that.myConceptMapUrl)
.append(myConceptMapVersion, that.myConceptMapVersion)
.append(mySourceValueSetUrl, that.mySourceValueSetUrl)
.append(myTargetValueSetUrl, that.myTargetValueSetUrl)
.append(myResourcePid, that.myResourcePid)
.append(myReverse, that.myReverse)
.isEquals();
}
@Override
public int hashCode() {
return myHashCode;
return new HashCodeBuilder(17, 37)
.append(myCodings)
.append(myTargetSystemUrl)
.append(myConceptMapUrl)
.append(myConceptMapVersion)
.append(mySourceValueSetUrl)
.append(myTargetValueSetUrl)
.append(myResourcePid)
.append(myReverse)
.toHashCode();
}
public String getSourceSystemUrl() {
return mySourceSystemUrl;
}
public String getSourceCode() {
return mySourceCode;
public List<IBaseCoding> getCodings() {
return myCodings;
}
public String getTargetSystemUrl() {
return myTargetSystemUrl;
}
public String getConceptMapUrl() {
return myConceptMapUrl;
}
public String getConceptMapVersion() {
return myConceptMapVersion;
}
public String getSourceValueSetUrl() {
return mySourceValueSetUrl;
}
public String getTargetValueSetUrl() {
return myTargetValueSetUrl;
}
public Long getResourcePid() {
return myResourcePid;
}
public boolean isReverse() {
return myReverse;
}
}
@ -25,7 +25,7 @@ public final class Msg {
/**
* IMPORTANT: Please update the following comment after you add a new code
* Last code value: 2076
* Last code value: 2078
*/
private Msg() {}
@ -22,12 +22,7 @@ package ca.uhn.fhir.rest.api;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.*;
import static org.apache.commons.lang3.StringUtils.defaultIfBlank;

@ -217,6 +212,7 @@ public class Constants {
public static final String PARAMQUALIFIER_STRING_EXACT = ":exact";
public static final String PARAMQUALIFIER_TOKEN_TEXT = ":text";
public static final String PARAMQUALIFIER_MDM = ":mdm";
public static final String PARAMQUALIFIER_NICKNAME = ":nickname";
public static final String PARAMQUALIFIER_TOKEN_OF_TYPE = ":of-type";
public static final String PARAMQUALIFIER_TOKEN_NOT = ":not";
public static final int STATUS_HTTP_200_OK = 200;
@ -21,9 +21,11 @@ package ca.uhn.fhir.rest.param;
*/
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.StringDt;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;

@ -38,7 +40,8 @@ public class StringParam extends BaseParam implements IQueryParameterType {
private boolean myExact;
private String myValue;
private Boolean myMdmExpand;
private Boolean myNicknameExpand;
/**
* Constructor

@ -77,12 +80,12 @@ public class StringParam extends BaseParam implements IQueryParameterType {
return ParameterUtil.escape(myValue);
}
public boolean isMdmExpand() {
return myMdmExpand != null && myMdmExpand;
public boolean isNicknameExpand() {
return myNicknameExpand != null && myNicknameExpand;
}
public StringParam setMdmExpand(boolean theMdmExpand) {
myMdmExpand = theMdmExpand;
public StringParam setNicknameExpand(boolean theNicknameExpand) {
myNicknameExpand = theNicknameExpand;
return this;
}

@ -98,6 +101,15 @@ public class StringParam extends BaseParam implements IQueryParameterType {
@Override
void doSetValueAsQueryToken(FhirContext theContext, String theParamName, String theQualifier, String theValue) {
if (Constants.PARAMQUALIFIER_NICKNAME.equals(theQualifier)) {
if ("name".equals(theParamName) || "given".equals(theParamName)) {
myNicknameExpand = true;
theQualifier = "";
} else {
throw new InvalidRequestException(Msg.code(2077) + "Modifier " + Constants.PARAMQUALIFIER_NICKNAME + " may only be used with 'name' and 'given' search parameters");
}
}
if (Constants.PARAMQUALIFIER_STRING_EXACT.equals(theQualifier)) {
setExact(true);
} else {
@ -58,8 +58,8 @@ public class SearchParameterUtil {
* 1. Attempt to find one called 'patient'
* 2. If that fails, find one called 'subject'
* 3. If that fails, find by Patient Compartment.
* 3.1 If that returns >1 result, throw an error
* 3.2 If that returns 1 result, return it
* 3.1 If that returns >1 result, throw an error
* 3.2 If that returns 1 result, return it
*/
public static Optional<RuntimeSearchParam> getOnlyPatientSearchParamForResourceType(FhirContext theFhirContext, String theResourceType) {
RuntimeSearchParam myPatientSearchParam = null;

@ -115,7 +115,7 @@ public class SearchParameterUtil {
*/
public static boolean isResourceTypeInPatientCompartment(FhirContext theFhirContext, String theResourceType) {
RuntimeResourceDefinition runtimeResourceDefinition = theFhirContext.getResourceDefinition(theResourceType);
return getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition).size() > 0;
return getAllPatientCompartmentRuntimeSearchParams(runtimeResourceDefinition).size() > 0;
}
@ -147,4 +147,15 @@ public class SearchParameterUtil {
.map(t -> t.getValueAsString())
.orElse(null);
}
public static String stripModifier(String theSearchParam) {
String retval;
int colonIndex = theSearchParam.indexOf(":");
if (colonIndex == -1) {
retval = theSearchParam;
} else {
retval = theSearchParam.substring(0, colonIndex);
}
return retval;
}
}
@ -0,0 +1,3 @@
type: add
issue: 3442
title: "Provided a Remote Terminology Service implementation for the $translate operation."

@ -0,0 +1,3 @@
type: add
issue: 3534
title: "Added a new multi-column index on the HFJ_RESOURCE table indexing the columns RES_TYPE, RES_DELETED_AT, RES_UPDATED, PARTITION_ID and RES_ID and removed the existing single-column index on the RES_TYPE column. This new index will improve the performance of the $reindex operation and will be useful for some other queries as well."
@ -0,0 +1,5 @@
type: change
issue: 3534
title: "Ensure that migration steps do not time out.
Adding an index and other migration steps can exceed the default connection timeout.
This has been changed - migration steps now have no timeout and will run until completion."
@ -0,0 +1,5 @@
type: add
issue: 3563
title: "Added search parameter modifier :nickname that can be used with 'name' or 'given' search parameters.
E.g. Patient?given:nickname=Kenny will match a patient with the given name Kenneth. Also added an MDM matching
algorithm named NICKNAME that matches based on this."
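For illustration, a server-side search equivalent to the URL above could be built programmatically with the new `setNicknameExpand` flag on `StringParam` (a minimal sketch; the surrounding DAO/search plumbing is assumed):

```java
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.rest.param.StringParam;

class NicknameSearchSketch {
	// Equivalent of GET [base]/Patient?given:nickname=Kenny
	static SearchParameterMap buildNicknameSearch() {
		SearchParameterMap map = new SearchParameterMap();
		map.setLoadSynchronous(true);
		// setNicknameExpand(true) mirrors what the :nickname qualifier sets on StringParam
		map.add("given", new StringParam("Kenny").setNicknameExpand(true));
		return map;
	}
}
```

The `NicknameInterceptor` added later in this change performs the actual expansion when the `STORAGE_PRESEARCH_REGISTERED` pointcut fires.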

@ -0,0 +1,4 @@
---
type: change
issue: 3569
title: "The normalized and exact string database indexes have been changed to provide faster string searches."
@ -1,5 +1,7 @@
# JPA Server Search

## Limitations

The HAPI FHIR JPA Server fully implements most [FHIR search](https://www.hl7.org/fhir/search.html) operations for most versions of FHIR. However, there are some known limitations of the current implementation. Here is a partial list of search functionality that is not currently supported in HAPI FHIR:

### Chains within _has
@ -406,6 +406,14 @@ The following algorithms are currently supported:
Match names as strings in any order
</td>
<td>John Henry = John HENRY when exact=false, John Henry != Henry John</td>
</tr>
<tr>
<td>NICKNAME</td>
<td>matcher</td>
<td>
True if one name is a nickname of the other
</td>
<td>Ken = Kenneth, Kenny = Ken. Allen != Allan.</td>
</tr>
<tr>
<td>IDENTIFIER</td>
@ -128,6 +128,7 @@ import ca.uhn.fhir.jpa.search.warm.CacheWarmingSvcImpl;
import ca.uhn.fhir.jpa.search.warm.ICacheWarmingSvc;
import ca.uhn.fhir.jpa.searchparam.config.SearchParamConfig;
import ca.uhn.fhir.jpa.searchparam.extractor.IResourceLinkResolver;
import ca.uhn.fhir.jpa.searchparam.nickname.NicknameInterceptor;
import ca.uhn.fhir.jpa.searchparam.registry.ISearchParamProvider;
import ca.uhn.fhir.jpa.sp.ISearchParamPresenceSvc;
import ca.uhn.fhir.jpa.sp.SearchParamPresenceSvcImpl;

@ -164,6 +165,7 @@ import org.springframework.scheduling.concurrent.ConcurrentTaskScheduler;
import org.springframework.scheduling.concurrent.ScheduledExecutorFactoryBean;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.Date;
/*

@ -827,4 +829,10 @@ public class JpaConfig {
public MemberMatcherR4Helper memberMatcherR4Helper(FhirContext theFhirContext) {
return new MemberMatcherR4Helper(theFhirContext);
}
@Lazy
@Bean
public NicknameInterceptor nicknameInterceptor() throws IOException {
return new NicknameInterceptor();
}
}
@ -36,6 +36,8 @@ import ca.uhn.fhir.jpa.model.search.ExtendedLuceneIndexData;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteOptions;
import ca.uhn.fhir.jpa.search.autocomplete.ValueSetAutocompleteSearch;
import ca.uhn.fhir.jpa.search.builder.ISearchQueryExecutor;
import ca.uhn.fhir.jpa.search.builder.SearchQueryExecutors;
import ca.uhn.fhir.jpa.search.builder.sql.SearchQueryExecutor;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.ISearchParamExtractor;
import ca.uhn.fhir.jpa.searchparam.extractor.ResourceIndexedSearchParams;

@ -47,7 +49,9 @@ import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.rest.server.util.ResourceSearchParams;
import org.hibernate.search.backend.elasticsearch.ElasticsearchExtension;
import org.hibernate.search.engine.search.query.SearchScroll;
import org.hibernate.search.engine.search.query.dsl.SearchQueryOptionsStep;
import org.hibernate.search.mapper.orm.Search;
import org.hibernate.search.mapper.orm.search.loading.dsl.SearchLoadingOptionsStep;
import org.hibernate.search.mapper.orm.session.SearchSession;
import org.hibernate.search.mapper.orm.work.SearchIndexingPlan;
import org.hibernate.search.util.common.SearchException;
@ -132,14 +136,34 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
private ISearchQueryExecutor doSearch(String theResourceType, SearchParameterMap theParams, ResourcePersistentId theReferencingPid) {
// keep this in sync with supportsSomeOf();
SearchSession session = getSearchSession();
if (theParams.getOffset() != null && theParams.getOffset() != 0) {
// perform an offset search instead of a scroll one, which doesn't allow for offset
List<Long> queryFetchResult = getSearchQueryOptionsStep(
theResourceType, theParams, theReferencingPid).fetchHits(theParams.getOffset(), theParams.getCount());
// indicate param was already processed, otherwise queries DB to process it
theParams.setOffset(null);
return SearchQueryExecutors.from(queryFetchResult);
}
SearchScroll<Long> esResult = getSearchScroll(theResourceType, theParams, theReferencingPid);
return new SearchScrollQueryExecutorAdaptor(esResult);
}
private SearchScroll<Long> getSearchScroll(String theResourceType, SearchParameterMap theParams, ResourcePersistentId theReferencingPid) {
int scrollSize = 50;
if (theParams.getCount()!=null) {
scrollSize = theParams.getCount();
}
SearchScroll<Long> esResult = session.search(ResourceTable.class)
return getSearchQueryOptionsStep(theResourceType, theParams, theReferencingPid).scroll(scrollSize);
}
private SearchQueryOptionsStep<?, Long, SearchLoadingOptionsStep, ?, ?> getSearchQueryOptionsStep(
String theResourceType, SearchParameterMap theParams, ResourcePersistentId theReferencingPid) {
return getSearchSession().search(ResourceTable.class)
// The document id is the PK which is pid. We use this instead of _myId to avoid fetching the doc body.
.select(
// adapt the String docRef.id() to the Long that it really is.

@ -188,11 +212,11 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
//DROP EARLY HERE IF BOOL IS EMPTY?
})
).scroll(scrollSize);
return new SearchScrollQueryExecutorAdaptor(esResult);
);
}
@Nonnull
private SearchSession getSearchSession() {
return Search.session(myEntityManager);

@ -314,4 +338,14 @@ public class FulltextSearchSvcImpl implements IFulltextSearchSvc {
.map(p -> p.toResource(parser))
.collect(Collectors.toList());
}
@Override
public long count(String theResourceName, SearchParameterMap theParams) {
SearchQueryOptionsStep<?, Long, SearchLoadingOptionsStep, ?, ?> queryOptionsStep =
getSearchQueryOptionsStep(theResourceName, theParams, null);
return queryOptionsStep.fetchTotalHitCount();
}
}
@ -45,6 +45,7 @@ public interface IFulltextSearchSvc {
*/
List<ResourcePersistentId> search(String theResourceName, SearchParameterMap theParams);
/**
* Query the index for a scrollable iterator of results.
* No max size to the result iterator.

@ -90,4 +91,8 @@ public interface IFulltextSearchSvc {
*/
List<IBaseResource> getResources(Collection<Long> thePids);
/**
* Returns accurate hit count
*/
long count(String theResourceName, SearchParameterMap theParams);
}
@ -41,7 +41,7 @@ public interface ISearchBuilder {
IResultIterator createQuery(SearchParameterMap theParams, SearchRuntimeDetails theSearchRuntime, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId);
Iterator<Long> createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, RequestPartitionId theRequestPartitionId);
Long createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, RequestPartitionId theRequestPartitionId);
void setMaxResultsToFetch(Integer theMaxResultsToFetch);
@ -227,14 +227,14 @@ public class LegacySearchBuilder implements ISearchBuilder {
}
@Override
public Iterator<Long> createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) {
public Long createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) {
assert theRequestPartitionId != null;
assert TransactionSynchronizationManager.isActualTransactionActive();
init(theParams, theSearchUuid, theRequestPartitionId);
List<TypedQuery<Long>> queries = createQuery(null, null, null, true, theRequest, null);
return new CountQueryIterator(queries.get(0));
return new CountQueryIterator(queries.get(0)).next();
}
/**
@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao.dstu3;
* #L%
*/
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;

@ -38,19 +39,19 @@ import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Collections;
import java.util.Date;
public class FhirResourceDaoConceptMapDstu3 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
@Autowired
private ITermConceptMappingSvc myTermConceptMappingSvc;
@Autowired
private IValidationSupport myValidationSupport;
@Override
public TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
if (theTranslationRequest.hasReverse() && theTranslationRequest.getReverseAsBoolean()) {
return myTermConceptMappingSvc.translateWithReverse(theTranslationRequest);
}
return myTermConceptMappingSvc.translate(theTranslationRequest);
IValidationSupport.TranslateCodeRequest translateCodeRequest = theTranslationRequest.asTranslateCodeRequest();
return myValidationSupport.translateConcept(translateCodeRequest);
}
|
|
@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao.r4;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
|
||||
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
|
||||
|
@ -29,23 +30,28 @@ import ca.uhn.fhir.jpa.model.entity.ResourceTable;
|
|||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.rest.api.server.RequestDetails;
|
||||
import ca.uhn.fhir.rest.api.server.storage.TransactionDetails;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.Date;
|
||||
import java.util.List;
|
||||
|
||||
public class FhirResourceDaoConceptMapR4 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
|
||||
@Autowired
|
||||
private ITermConceptMappingSvc myTermConceptMappingSvc;
|
||||
@Autowired
|
||||
private IValidationSupport myValidationSupport;
|
||||
|
||||
@Override
|
||||
public TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
|
||||
if (theTranslationRequest.hasReverse() && theTranslationRequest.getReverseAsBoolean()) {
|
||||
return myTermConceptMappingSvc.translateWithReverse(theTranslationRequest);
|
||||
}
|
||||
|
||||
return myTermConceptMappingSvc.translate(theTranslationRequest);
|
||||
IValidationSupport.TranslateCodeRequest translateCodeRequest = theTranslationRequest.asTranslateCodeRequest();
|
||||
return myValidationSupport.translateConcept(translateCodeRequest);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -20,6 +20,7 @@ package ca.uhn.fhir.jpa.dao.r5;
* #L%
*/
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.jpa.api.dao.IFhirResourceDaoConceptMap;
import ca.uhn.fhir.jpa.api.model.TranslationRequest;

@ -35,19 +36,19 @@ import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r5.model.ConceptMap;
import org.springframework.beans.factory.annotation.Autowired;
import java.util.Collections;
import java.util.Date;
public class FhirResourceDaoConceptMapR5 extends BaseHapiFhirResourceDao<ConceptMap> implements IFhirResourceDaoConceptMap<ConceptMap> {
@Autowired
private ITermConceptMappingSvc myTermConceptMappingSvc;
@Autowired
private IValidationSupport myValidationSupport;
@Override
public TranslateConceptResults translate(TranslationRequest theTranslationRequest, RequestDetails theRequestDetails) {
if (theTranslationRequest.hasReverse() && theTranslationRequest.getReverseAsBoolean()) {
return myTermConceptMappingSvc.translateWithReverse(theTranslationRequest);
}
return myTermConceptMappingSvc.translate(theTranslationRequest);
IValidationSupport.TranslateCodeRequest translateCodeRequest = theTranslationRequest.asTranslateCodeRequest();
return myValidationSupport.translateConcept(translateCodeRequest);
}
@Override
@ -102,6 +102,7 @@ public class ExtendedLuceneSearchBuilder {
case EMPTY_MODIFIER:
return true;
case Constants.PARAMQUALIFIER_MDM:
case Constants.PARAMQUALIFIER_NICKNAME:
default:
return false;
}
@ -30,13 +30,11 @@ import ca.uhn.fhir.mdm.log.Logs;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.model.primitive.IdDt;
import ca.uhn.fhir.rest.param.ReferenceParam;
import ca.uhn.fhir.rest.param.StringParam;
import ca.uhn.fhir.rest.param.TokenParam;
import org.hl7.fhir.instance.model.api.IIdType;
import org.slf4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import java.lang.reflect.Constructor;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@ -92,7 +90,7 @@ public class MdmSearchExpandingInterceptor {
// If we failed, attempt to expand as a golden resource
if (expandedResourceIds.isEmpty()) {
expandedResourceIds = myMdmLinkExpandSvc.expandMdmByGoldenResourceId(new IdDt(refParam.getValue()));
expandedResourceIds = myMdmLinkExpandSvc.expandMdmByGoldenResourceId(new IdDt(refParam.getValue()));
}
//Rebuild the search param list.

@ -104,8 +102,7 @@ public class MdmSearchExpandingInterceptor {
.forEach(toAdd::add);
}
}
}
else if (theParamName.equalsIgnoreCase("_id")) {
} else if (theParamName.equalsIgnoreCase("_id")) {
expandIdParameter(iQueryParameterType, toAdd, toRemove);
}
}

@ -117,6 +114,7 @@ public class MdmSearchExpandingInterceptor {
/**
* Expands out the provided _id parameter into all the various
* ids of linked resources.
*
* @param theIdParameter
* @param theAddList
* @param theRemoveList

@ -130,29 +128,21 @@ public class MdmSearchExpandingInterceptor {
IIdType id;
Creator<? extends IQueryParameterType> creator;
boolean mdmExpand = false;
if (theIdParameter instanceof StringParam) {
StringParam param = (StringParam) theIdParameter;
mdmExpand = param.isMdmExpand();
id = new IdDt(param.getValue());
creator = StringParam::new;
}
else if (theIdParameter instanceof TokenParam) {
if (theIdParameter instanceof TokenParam) {
TokenParam param = (TokenParam) theIdParameter;
mdmExpand = param.isMdmExpand();
id = new IdDt(param.getValue());
creator = TokenParam::new;
}
else {
} else {
creator = null;
id = null;
}
if (id == null || creator == null) {
if (id == null) {
// in case the _id parameter type is different from the above
ourLog.warn("_id parameter of incorrect type. Expected StringParam or TokenParam, but got {}. No expansion will be done!",
theIdParameter.getClass().getSimpleName());
}
else if (mdmExpand) {
} else if (mdmExpand) {
ourLog.debug("_id parameter must be expanded out from: {}", id.getValue());
Set<String> expandedResourceIds = myMdmLinkExpandSvc.expandMdmBySourceResourceId(id);
@ -295,6 +295,46 @@ public class HapiFhirJpaMigrationTasks extends BaseMigrationTasks<VersionEnum> {
version
.onTable("HFJ_BLK_EXPORT_JOB").modifyColumn("20220423.1", "EXP_TIME").nullable().withType(ColumnTypeEnum.DATE_TIMESTAMP);
// New Index on HFJ_RESOURCE for $reindex Operation - hapi-fhir #3534
{
version.onTable("HFJ_RESOURCE")
.addIndex("20220425.1", "IDX_RES_TYPE_DEL_UPDATED")
.unique(false)
.online(true)
.withColumns("RES_TYPE", "RES_DELETED_AT", "RES_UPDATED", "PARTITION_ID", "RES_ID");
// Drop existing Index on HFJ_RESOURCE.RES_TYPE since the new Index will meet the overall Index Demand
version
.onTable("HFJ_RESOURCE")
.dropIndexOnline("20220425.2", "IDX_RES_TYPE");
}
/**
* Update string indexing
* @see ca.uhn.fhir.jpa.search.builder.predicate.StringPredicateBuilder
* @see ResourceIndexedSearchParamString
*/
{
Builder.BuilderWithTableName tokenTable = version.onTable("HFJ_SPIDX_STRING");
// add res_id, and partition_id so queries are covered without row-reads.
tokenTable
.addIndex("20220428.1", "IDX_SP_STRING_HASH_NRM_V2")
.unique(false)
.online(true)
.withColumns("HASH_NORM_PREFIX", "SP_VALUE_NORMALIZED", "RES_ID", "PARTITION_ID");
tokenTable.dropIndexOnline("20220428.2", "IDX_SP_STRING_HASH_NRM");
tokenTable
.addIndex("20220428.3", "IDX_SP_STRING_HASH_EXCT_V2")
.unique(false)
.online(true)
.withColumns("HASH_EXACT", "RES_ID", "PARTITION_ID");
tokenTable.dropIndexOnline("20220428.4", "IDX_SP_STRING_HASH_EXCT");
// we will drop the updated column. Start with the index.
tokenTable.dropIndexOnline("20220428.5", "IDX_SP_STRING_UPDATED");
}
}
/**
@ -508,6 +508,7 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
return candidate.orElse(null);
}
private IBundleProvider executeQuery(String theResourceType, SearchParameterMap theParams, RequestDetails theRequestDetails, String theSearchUuid, ISearchBuilder theSb, Integer theLoadSynchronousUpTo, RequestPartitionId theRequestPartitionId) {
SearchRuntimeDetails searchRuntimeDetails = new SearchRuntimeDetails(theRequestDetails, theSearchUuid);
searchRuntimeDetails.setLoadSynchronous(true);

@ -533,12 +534,11 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
List<List<IQueryParameterType>> contentAndTerms = theParams.get(Constants.PARAM_CONTENT);
List<List<IQueryParameterType>> textAndTerms = theParams.get(Constants.PARAM_TEXT);
Iterator<Long> countIterator = theSb.createCountQuery(theParams, theSearchUuid, theRequestDetails, theRequestPartitionId);
count = theSb.createCountQuery(theParams, theSearchUuid, theRequestDetails, theRequestPartitionId);
if (contentAndTerms != null) theParams.put(Constants.PARAM_CONTENT, contentAndTerms);
if (textAndTerms != null) theParams.put(Constants.PARAM_TEXT, textAndTerms);
count = countIterator.next();
ourLog.trace("Got count {}", count);
}

@ -1233,8 +1233,8 @@ public class SearchCoordinatorSvcImpl implements ISearchCoordinatorSvc {
* we will have to clone those parameters here so that
* the "correct" params are used in createQuery below
*/
Iterator<Long> countIterator = sb.createCountQuery(myParams.clone(), mySearch.getUuid(), myRequest, myRequestPartitionId);
Long count = countIterator.hasNext() ? countIterator.next() : 0L;
Long count = sb.createCountQuery(myParams.clone(), mySearch.getUuid(), myRequest, myRequestPartitionId);
ourLog.trace("Got count {}", count);
TransactionTemplate txTemplate = new TransactionTemplate(myManagedTxManager);
@ -75,6 +75,7 @@ import ca.uhn.fhir.model.valueset.BundleEntrySearchModeEnum;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchContainedModeEnum;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.api.SortOrderEnum;
import ca.uhn.fhir.rest.api.SortSpec;
import ca.uhn.fhir.rest.api.server.IPreResourceAccessDetails;

@ -97,6 +98,7 @@ import com.google.common.collect.Streams;
import com.healthmarketscience.sqlbuilder.Condition;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.math.NumberUtils;
import org.apache.jena.sparql.engine.QueryIterator;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.slf4j.Logger;

@ -119,6 +121,7 @@ import javax.persistence.criteria.From;
import javax.persistence.criteria.Predicate;
import javax.persistence.criteria.Root;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@ -271,18 +274,24 @@ public class SearchBuilder implements ISearchBuilder {
@SuppressWarnings("ConstantConditions")
@Override
public Iterator<Long> createCountQuery(SearchParameterMap theParams, String theSearchUuid, RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) {
public Long createCountQuery(SearchParameterMap theParams, String theSearchUuid,
RequestDetails theRequest, @Nonnull RequestPartitionId theRequestPartitionId) {
assert theRequestPartitionId != null;
assert TransactionSynchronizationManager.isActualTransactionActive();
init(theParams, theSearchUuid, theRequestPartitionId);
List<ISearchQueryExecutor> queries = createQuery(myParams, null, null, null, true, theRequest, null);
if (queries.isEmpty()) {
return Collections.emptyIterator();
if (checkUseHibernateSearch()) {
long count = myFulltextSearchSvc.count(myResourceName, theParams.clone());
return count;
}
try (ISearchQueryExecutor queryExecutor = queries.get(0)) {
return Lists.newArrayList(queryExecutor.next()).iterator();
List<ISearchQueryExecutor> queries = createQuery(theParams.clone(), null, null, null, true, theRequest, null);
if (queries.isEmpty()) {
return 0L;
} else {
return queries.get(0).next();
}
}

@ -309,6 +318,7 @@ public class SearchBuilder implements ISearchBuilder {
return new QueryIterator(theSearchRuntimeDetails, theRequest);
}
private void init(SearchParameterMap theParams, String theSearchUuid, RequestPartitionId theRequestPartitionId) {
myCriteriaBuilder = myEntityManager.getCriteriaBuilder();
myParams = theParams;

@ -316,7 +326,7 @@ public class SearchBuilder implements ISearchBuilder {
myRequestPartitionId = theRequestPartitionId;
}
private List<ISearchQueryExecutor> createQuery(SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCount, RequestDetails theRequest,
private List<ISearchQueryExecutor> createQuery(SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest,
SearchRuntimeDetails theSearchRuntimeDetails) {
ArrayList<ISearchQueryExecutor> queries = new ArrayList<>();

@ -359,8 +369,6 @@ public class SearchBuilder implements ISearchBuilder {
!fulltextExecutor.hasNext() ||
// Our hibernate search query doesn't respect partitions yet
(!myPartitionSettings.isPartitioningEnabled() &&
// we don't support _count=0 yet.
!theCount &&
// were there AND terms left? Then we still need the db.
theParams.isEmpty() &&
// not every param is a param. :-(

@ -382,11 +390,11 @@ public class SearchBuilder implements ISearchBuilder {
// We break the pids into chunks that fit in the 1k limit for jdbc bind params.
// wipmb change chunk to take iterator
new QueryChunker<Long>()
.chunk(Streams.stream(fulltextExecutor).collect(Collectors.toList()), t -> doCreateChunkedQueries(theParams, t, theOffset, sort, theCount, theRequest, queries));
.chunk(Streams.stream(fulltextExecutor).collect(Collectors.toList()), t -> doCreateChunkedQueries(theParams, t, theOffset, sort, theCountOnlyFlag, theRequest, queries));
}
} else {
// do everything in the database.
Optional<SearchQueryExecutor> query = createChunkedQuery(theParams, sort, theOffset, theMaximumResults, theCount, theRequest, null);
Optional<SearchQueryExecutor> query = createChunkedQuery(theParams, sort, theOffset, theMaximumResults, theCountOnlyFlag, theRequest, null);
query.ifPresent(queries::add);
}

@ -408,7 +416,7 @@ public class SearchBuilder implements ISearchBuilder {
}
// TODO MB someday we'll want a query planner to figure out if we _should_ or _must_ use the ft index, not just if we can.
return fulltextEnabled &&
return fulltextEnabled && myParams != null &&
myParams.getSearchContainedMode() == SearchContainedModeEnum.FALSE &&
myFulltextSearchSvc.supportsSomeOf(myParams);
}

@ -506,9 +514,9 @@ public class SearchBuilder implements ISearchBuilder {
}
}
private Optional<SearchQueryExecutor> createChunkedQuery(SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCount, RequestDetails theRequest, List<Long> thePidList) {
private Optional<SearchQueryExecutor> createChunkedQuery(SearchParameterMap theParams, SortSpec sort, Integer theOffset, Integer theMaximumResults, boolean theCountOnlyFlag, RequestDetails theRequest, List<Long> thePidList) {
String sqlBuilderResourceName = myParams.getEverythingMode() == null ? myResourceName : null;
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCount);
SearchQueryBuilder sqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, sqlBuilderResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag);
QueryStack queryStack3 = new QueryStack(theParams, myDaoConfig, myDaoConfig.getModelConfig(), myContext, sqlBuilder, mySearchParamRegistry, myPartitionSettings);
if (theParams.keySet().size() > 1 || theParams.getSort() != null || theParams.keySet().contains(Constants.PARAM_HAS) || isPotentiallyContainedReferenceParameterExistsAtRoot(theParams)) {

@ -533,7 +541,7 @@ public class SearchBuilder implements ISearchBuilder {
// is basically a reverse-include search. For type/Everything (as opposed to instance/Everything)
// the one problem with this approach is that it doesn't catch Patients that have absolutely
// nothing linked to them. So we do one additional query to make sure we catch those too.
SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, myResourceName, mySqlBuilderFactory, myDialectProvider, theCount);
SearchQueryBuilder fetchPidsSqlBuilder = new SearchQueryBuilder(myContext, myDaoConfig.getModelConfig(), myPartitionSettings, myRequestPartitionId, myResourceName, mySqlBuilderFactory, myDialectProvider, theCountOnlyFlag);
GeneratedSql allTargetsSql = fetchPidsSqlBuilder.generate(theOffset, myMaxResultsToFetch);
String sql = allTargetsSql.getSql();
Object[] args = allTargetsSql.getBindVariables().toArray(new Object[0]);

@ -613,7 +621,7 @@ public class SearchBuilder implements ISearchBuilder {
* finds the appropriate resources) in an outer search which is then sorted
*/
if (sort != null) {
assert !theCount;
assert !theCountOnlyFlag;
createSort(queryStack3, sort);
}
@ -47,6 +47,8 @@ import org.apache.commons.lang3.StringUtils;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hl7.fhir.exceptions.FHIRException;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.CodeableConcept;

@ -118,16 +120,10 @@ public class TermConceptMappingSvcImpl implements ITermConceptMappingSvc {
@Override
@Transactional
public TranslateConceptResults translateConcept(TranslateCodeRequest theRequest) {
CodeableConcept sourceCodeableConcept = new CodeableConcept();
sourceCodeableConcept
.addCoding()
.setSystem(theRequest.getSourceSystemUrl())
.setCode(theRequest.getSourceCode());
TranslationRequest request = new TranslationRequest();
request.setCodeableConcept(sourceCodeableConcept);
request.setTargetSystem(new UriType(theRequest.getTargetSystemUrl()));
TranslationRequest request = TranslationRequest.fromTranslateCodeRequest(theRequest);
if (request.hasReverse() && request.getReverseAsBoolean()) {
return translateWithReverse(request);
}
return translate(request);
}
@ -31,6 +31,7 @@ import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.extractor.SearchParamExtractorService;
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
import ca.uhn.fhir.util.SearchParameterUtil;
import org.apache.commons.lang3.StringUtils;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.springframework.beans.factory.annotation.Autowired;

@ -61,7 +62,8 @@ public class MdmSearchParamSvc {
public List<String> getValueFromResourceForSearchParam(IBaseResource theResource, String theSearchParam) {
String resourceType = myFhirContext.getResourceType(theResource);
RuntimeSearchParam activeSearchParam = mySearchParamRegistry.getActiveSearchParam(resourceType, theSearchParam);
String searchParam = SearchParameterUtil.stripModifier(theSearchParam);
RuntimeSearchParam activeSearchParam = mySearchParamRegistry.getActiveSearchParam(resourceType, searchParam);
return mySearchParamExtractorService.extractParamValuesAsStrings(activeSearchParam, theResource);
}
@ -15,6 +15,7 @@ import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.List;
import java.util.stream.Collectors;
@ -161,68 +162,134 @@ public class MdmProviderMatchR4Test extends BaseProviderR4Test {
public void testMatchWithCoarseDateGranularity() throws Exception {
setMdmRuleJson("mdm/coarse-birthdate-mdm-rules.json");
String granularPatient = "{\n" +
" \"resourceType\": \"Patient\",\n" +
" \"active\": true,\n" +
" \"name\": [\n" +
" {\n" +
" \"family\": \"PETERSON\",\n" +
" \"given\": [\n" +
" \"GARY\",\n" +
" \"D\"\n" +
" ]\n" +
" }\n" +
" ],\n" +
" \"telecom\": [\n" +
" {\n" +
" \"system\": \"phone\",\n" +
" \"value\": \"100100100\",\n" +
" \"use\": \"home\"\n" +
" }\n" +
" ],\n" +
" \"gender\": \"male\",\n" +
" \"birthDate\": \"1991-10-10\",\n" +
" \"address\": [\n" +
" {\n" +
" \"state\": \"NY\",\n" +
" \"postalCode\": \"12313\"\n" +
" }\n" +
" ]\n" +
"}";
String granularPatient = """
{
"resourceType": "Patient",
"active": true,
"name": [
{
"family": "PETERSON",
"given": [
"GARY",
"D"
]
}
],
"telecom": [
{
"system": "phone",
"value": "100100100",
"use": "home"
}
],
"gender": "male",
"birthDate": "1991-10-10",
"address": [
{
"state": "NY",
"postalCode": "12313"
}
]
}""";
IBaseResource iBaseResource = myFhirContext.newJsonParser().parseResource(granularPatient);
createPatient((Patient) iBaseResource);
String coarsePatient = "{\n" +
" \"resourceType\": \"Patient\",\n" +
" \"active\": true,\n" +
" \"name\": [\n" +
" {\n" +
" \"family\": \"PETERSON\",\n" +
" \"given\": [\n" +
" \"GARY\",\n" +
" \"D\"\n" +
" ]\n" +
" }\n" +
" ],\n" +
" \"telecom\": [\n" +
" {\n" +
" \"system\": \"phone\",\n" +
" \"value\": \"100100100\",\n" +
" \"use\": \"home\"\n" +
" }\n" +
" ],\n" +
" \"gender\": \"male\",\n" +
" \"birthDate\": \"1991-10\",\n" +
" \"address\": [\n" +
" {\n" +
" \"state\": \"NY\",\n" +
" \"postalCode\": \"12313\"\n" +
" }\n" +
" ]\n" +
"}";
String coarsePatient = """
{
"resourceType": "Patient",
"active": true,
"name": [
{
"family": "PETERSON",
"given": [
"GARY",
"D"
]
}
],
"telecom": [
{
"system": "phone",
"value": "100100100",
"use": "home"
}
],
"gender": "male",
"birthDate": "1991-10",
"address": [
{
"state": "NY",
"postalCode": "12313"
}
]
}""";
IBaseResource coarseResource = myFhirContext.newJsonParser().parseResource(coarsePatient);
Bundle result = (Bundle) myMdmProvider.match((Patient) coarseResource, new SystemRequestDetails());
assertEquals(1, result.getEntry().size());
}
@Test
public void testNicknameMatch() throws IOException {
setMdmRuleJson("mdm/nickname-mdm-rules.json");
String formalPatientJson = """
{
"resourceType": "Patient",
"active": true,
"name": [
{
"family": "PETERSON",
"given": [
"Gregory"
]
}
],
"gender": "male"
}""";
Patient formalPatient = (Patient) myFhirContext.newJsonParser().parseResource(formalPatientJson);
createPatient(formalPatient);
String noMatchPatientJson = """
{
"resourceType": "Patient",
"active": true,
"name": [
{
"family": "PETERSON",
"given": [
"Bob"
]
}
],
"gender": "male"
}""";
Patient noMatchPatient = (Patient) myFhirContext.newJsonParser().parseResource(noMatchPatientJson);
createPatient(noMatchPatient);
{
Bundle result = (Bundle) myMdmProvider.match(noMatchPatient, new SystemRequestDetails());
assertEquals(0, result.getEntry().size());
}
String nickPatientJson = """
{
"resourceType": "Patient",
"active": true,
"name": [
{
"family": "PETERSON",
"given": [
"Greg"
]
}
],
"gender": "male"
}""";
{
Patient nickPatient = (Patient) myFhirContext.newJsonParser().parseResource(nickPatientJson);
Bundle result = (Bundle) myMdmProvider.match(nickPatient, new SystemRequestDetails());
assertEquals(1, result.getEntry().size());
}
}
}
@ -1,8 +1,8 @@
package ca.uhn.fhir.jpa.mdm.svc;
import ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson;
import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmCandidateSearchCriteriaBuilderSvc;
import ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson;
import org.hl7.fhir.r4.model.HumanName;
import org.hl7.fhir.r4.model.Patient;
import org.junit.jupiter.api.Test;
@ -4,15 +4,21 @@ import ca.uhn.fhir.interceptor.model.RequestPartitionId;
import ca.uhn.fhir.jpa.mdm.BaseMdmR4Test;
import ca.uhn.fhir.jpa.mdm.svc.candidate.MdmCandidateSearchSvc;
import ca.uhn.fhir.jpa.mdm.svc.candidate.TooManyCandidatesException;
import ca.uhn.fhir.jpa.searchparam.MatchUrlService;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.jpa.searchparam.nickname.NicknameInterceptor;
import ca.uhn.fhir.mdm.rules.config.MdmSettings;
import ca.uhn.fhir.rest.api.server.IBundleProvider;
import org.hl7.fhir.instance.model.api.IAnyResource;
import org.hl7.fhir.r4.model.Patient;
import org.hl7.fhir.r4.model.Practitioner;
import org.hl7.fhir.r4.model.Reference;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;

@ -28,10 +34,21 @@ public class MdmCandidateSearchSvcIT extends BaseMdmR4Test {
MdmCandidateSearchSvc myMdmCandidateSearchSvc;
@Autowired
MdmSettings myMdmSettings;
@Autowired
MatchUrlService myMatchUrlService;
private NicknameInterceptor myNicknameInterceptor;
@BeforeEach
public void before() throws IOException {
myNicknameInterceptor = new NicknameInterceptor();
myInterceptorRegistry.registerInterceptor(myNicknameInterceptor);
}
@AfterEach
public void resetMdmSettings() {
myMdmSettings.setCandidateSearchLimit(MdmSettings.DEFAULT_CANDIDATE_SEARCH_LIMIT);
myInterceptorRegistry.unregisterInterceptor(myNicknameInterceptor);
}
@Test

@ -43,6 +60,42 @@ public class MdmCandidateSearchSvcIT extends BaseMdmR4Test {
assertEquals(1, result.size());
}
@Test
public void testNickname() {
Practitioner formal = new Practitioner();
formal.getNameFirstRep().addGiven("William");
formal.getNameFirstRep().setFamily("Shatner");
formal.setActive(true);
myPractitionerDao.create(formal);
{
// First confirm we can search for this practitioner using a nickname search
SearchParameterMap map = myMatchUrlService.getResourceSearch("Practitioner?given:nickname=Bill&family=Shatner").getSearchParameterMap();
map.setLoadSynchronous(true);
IBundleProvider result = myPractitionerDao.search(map);
assertEquals(1, result.size());
Practitioner first = (Practitioner) result.getResources(0, 1).get(0);
assertEquals("William", first.getNameFirstRep().getGivenAsSingleString());
}
{
// Now achieve the same match via mdm
Practitioner nick = new Practitioner();
nick.getNameFirstRep().addGiven("Bill");
nick.getNameFirstRep().setFamily("Shatner");
Collection<IAnyResource> result = myMdmCandidateSearchSvc.findCandidates("Practitioner", nick, RequestPartitionId.allPartitions());
assertEquals(1, result.size());
}
{
// Should not match Bob
Practitioner noMatch = new Practitioner();
noMatch.getNameFirstRep().addGiven("Bob");
noMatch.getNameFirstRep().setFamily("Shatner");
Collection<IAnyResource> result = myMdmCandidateSearchSvc.findCandidates("Practitioner", noMatch, RequestPartitionId.allPartitions());
assertEquals(0, result.size());
}
}
@Test
public void findCandidatesMultipleMatchesDoNotCauseDuplicates() {

@ -83,9 +136,9 @@ public class MdmCandidateSearchSvcIT extends BaseMdmR4Test {
Patient newJane = buildJanePatient();
createActivePatient();
assertEquals(1, runInTransaction(()->myMdmCandidateSearchSvc.findCandidates("Patient", newJane, RequestPartitionId.allPartitions()).size()));
assertEquals(1, runInTransaction(() -> myMdmCandidateSearchSvc.findCandidates("Patient", newJane, RequestPartitionId.allPartitions()).size()));
createActivePatient();
assertEquals(2, runInTransaction(()->myMdmCandidateSearchSvc.findCandidates("Patient", newJane, RequestPartitionId.allPartitions()).size()));
assertEquals(2, runInTransaction(() -> myMdmCandidateSearchSvc.findCandidates("Patient", newJane, RequestPartitionId.allPartitions()).size()));
try {
createActivePatient();
@ -25,6 +25,13 @@
"searchParams": [
"general-practitioner"
]
},
{
"resourceType": "Practitioner",
"searchParams": [
"given:nickname",
"family"
]
}
],
"candidateFilterSearchParams": [
@ -0,0 +1,44 @@
{
"version":"1",
"mdmTypes": ["Patient", "Practitioner"],
"candidateSearchParams":[
{
"resourceType": "*",
"searchParams": [
"given:nickname",
"family"
]
}
],
"candidateFilterSearchParams":[],
"matchFields":[
{
"name":"gender",
"resourceType":"Patient",
"resourcePath":"gender",
"matcher":{
"algorithm":"STRING"
}
},
{
"name":"nickname",
"resourceType":"*",
"resourcePath":"name.given",
"matcher":{
"algorithm":"NICKNAME"
}
},
{
"name":"lastname",
"resourceType":"*",
"resourcePath":"name.family",
"matcher":{
"algorithm":"STRING",
"exact": true
}
}
],
"matchResultMap":{
"nickname,lastname": "MATCH"
}
}
@ -59,10 +59,9 @@ import static org.apache.commons.lang3.StringUtils.defaultString;
// This is used for sorting, and for :contains queries currently
@Index(name = "IDX_SP_STRING_HASH_IDENT", columnList = "HASH_IDENTITY"),
@Index(name = "IDX_SP_STRING_HASH_NRM", columnList = "HASH_NORM_PREFIX,SP_VALUE_NORMALIZED"),
@Index(name = "IDX_SP_STRING_HASH_EXCT", columnList = "HASH_EXACT"),
@Index(name = "IDX_SP_STRING_HASH_NRM_V2", columnList = "HASH_NORM_PREFIX,SP_VALUE_NORMALIZED,RES_ID,PARTITION_ID"),
@Index(name = "IDX_SP_STRING_HASH_EXCT_V2", columnList = "HASH_EXACT,RES_ID,PARTITION_ID"),
@Index(name = "IDX_SP_STRING_UPDATED", columnList = "SP_UPDATED"),
@Index(name = "IDX_SP_STRING_RESID", columnList = "RES_ID")
})
public class ResourceIndexedSearchParamString extends BaseResourceIndexedSearchParam {
@ -73,9 +73,9 @@ import java.util.stream.Collectors;
@Indexed(routingBinder= @RoutingBinderRef(type = ResourceTableRoutingBinder.class))
@Entity
@Table(name = "HFJ_RESOURCE", uniqueConstraints = {}, indexes = {
// Do not reuse previously used index name: IDX_INDEXSTATUS
// Do not reuse previously used index name: IDX_INDEXSTATUS, IDX_RES_TYPE
@Index(name = "IDX_RES_DATE", columnList = "RES_UPDATED"),
@Index(name = "IDX_RES_TYPE", columnList = "RES_TYPE"),
@Index(name = "IDX_RES_TYPE_DEL_UPDATED", columnList = "RES_TYPE,RES_DELETED_AT,RES_UPDATED,PARTITION_ID,RES_ID"),
})
@NamedEntityGraph(name = "Resource.noJoins")
public class ResourceTable extends BaseHasResource implements Serializable, IBasePersistedResource, IResourceLookup {
@ -33,6 +33,7 @@ import ca.uhn.fhir.model.api.Include;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.api.QualifiedParamList;
import ca.uhn.fhir.rest.api.RestSearchParameterTypeEnum;
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
import ca.uhn.fhir.rest.param.DateRangeParam;
import ca.uhn.fhir.rest.param.ParameterUtil;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;

@ -113,6 +114,15 @@ public class MatchUrlService {
throw new InvalidRequestException(Msg.code(485) + "Invalid " + Constants.PARAM_COUNT + " value: " + intString);
}
}
} else if (Constants.PARAM_SEARCH_TOTAL_MODE.equals(nextParamName)) {
if (paramList != null && ! paramList.isEmpty() && ! paramList.get(0).isEmpty()) {
String totalModeEnumStr = paramList.get(0).get(0);
try {
paramMap.setSearchTotalMode(SearchTotalModeEnum.valueOf(totalModeEnumStr));
} catch (IllegalArgumentException e) {
throw new InvalidRequestException(Msg.code(2078) + "Invalid " + Constants.PARAM_SEARCH_TOTAL_MODE + " value: " + totalModeEnumStr);
}
}
} else if (Constants.PARAM_OFFSET.equals(nextParamName)) {
if (paramList != null && paramList.size() > 0 && paramList.get(0).size() > 0) {
String intString = paramList.get(0).get(0);
@@ -0,0 +1,67 @@
package ca.uhn.fhir.jpa.searchparam.nickname;

import ca.uhn.fhir.interceptor.api.Hook;
import ca.uhn.fhir.interceptor.api.Pointcut;
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
import ca.uhn.fhir.model.api.IQueryParameterType;
import ca.uhn.fhir.rest.param.StringParam;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

public class NicknameInterceptor {
	private static final Logger ourLog = LoggerFactory.getLogger(NicknameInterceptor.class);

	private final NicknameSvc myNicknameSvc;

	public NicknameInterceptor() throws IOException {
		myNicknameSvc = new NicknameSvc();
	}

	@Hook(Pointcut.STORAGE_PRESEARCH_REGISTERED)
	public void expandNicknames(SearchParameterMap theSearchParameterMap) {
		for (Map.Entry<String, List<List<IQueryParameterType>>> set : theSearchParameterMap.entrySet()) {
			String paramName = set.getKey();
			List<List<IQueryParameterType>> andList = set.getValue();
			for (List<IQueryParameterType> orList : andList) {
				// here we will know if it's an _id param or not
				// from theSearchParameterMap.keySet()
				expandAnyNicknameParameters(paramName, orList);
			}
		}
	}

	/**
	 * If a Parameter is a string parameter, and it has been set to expand Nicknames, perform the expansion.
	 */
	private void expandAnyNicknameParameters(String theParamName, List<IQueryParameterType> orList) {
		List<IQueryParameterType> toAdd = new ArrayList<>();
		List<IQueryParameterType> toRemove = new ArrayList<>();
		for (IQueryParameterType iQueryParameterType : orList) {
			if (iQueryParameterType instanceof StringParam) {
				StringParam stringParam = (StringParam) iQueryParameterType;
				if (stringParam.isNicknameExpand()) {
					ourLog.debug("Found a nickname parameter to expand: {} {}", theParamName, stringParam);
					toRemove.add(stringParam);
					// First, attempt to expand as a formal name
					String name = stringParam.getValue().toLowerCase(Locale.ROOT);
					List<String> expansions = myNicknameSvc.getEquivalentNames(name);
					if (expansions == null) {
						continue;
					}
					ourLog.debug("Parameter has been expanded to: {} {}", theParamName, String.join(", ", expansions));
					expansions.stream()
						.map(StringParam::new)
						.forEach(toAdd::add);
				}
			}
		}
		orList.removeAll(toRemove);
		orList.addAll(toAdd);
	}
}

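The interceptor above is not wired up by this file itself. As a hedged usage sketch (mirroring the autowired interceptor registry that NicknameSearchR4Test further below uses; the application-context lookup is invented for the example), registering it with HAPI FHIR's interceptor service could look like this:

	// Usage sketch only, not part of the change set. "appCtx" is a hypothetical Spring context;
	// in the tests below the interceptor is @Autowired and registered with myInterceptorRegistry.
	IInterceptorService interceptorService = appCtx.getBean(IInterceptorService.class);
	try {
		// The constructor declares IOException because it loads names.csv from the classpath
		interceptorService.registerInterceptor(new NicknameInterceptor());
	} catch (IOException e) {
		throw new ConfigurationException("Unable to load nickname data", e);
	}
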
@@ -0,0 +1,47 @@
package ca.uhn.fhir.jpa.searchparam.nickname;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class NicknameMap {
	private final Map<String, List<String>> myFormalToNick = new HashMap<>();
	private final Map<String, List<String>> myNicknameToFormal = new HashMap<>();

	void load(Reader theReader) throws IOException {
		try (BufferedReader reader = new BufferedReader(theReader)) {
			String line;
			while ((line = reader.readLine()) != null) {
				String[] parts = line.split(",");
				String key = parts[0];
				List<String> values = new ArrayList<>(Arrays.asList(parts).subList(1, parts.length));
				add(key, values);
			}
		}
	}

	private void add(String theKey, List<String> theValues) {
		myFormalToNick.put(theKey, theValues);
		for (String value : theValues) {
			myNicknameToFormal.putIfAbsent(value, new ArrayList<>());
			myNicknameToFormal.get(value).add(theKey);
		}
	}

	int size() {
		return myFormalToNick.size();
	}

	List<String> getNicknamesFromFormalNameOrNull(String theName) {
		return myFormalToNick.get(theName);
	}

	List<String> getFormalNamesFromNicknameOrNull(String theNickname) {
		return myNicknameToFormal.get(theNickname);
	}
}

@@ -0,0 +1,53 @@
package ca.uhn.fhir.jpa.searchparam.nickname;

import org.springframework.core.io.ClassPathResource;
import org.springframework.core.io.Resource;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;

public class NicknameSvc {
	private final NicknameMap myNicknameMap = new NicknameMap();

	public NicknameSvc() throws IOException {
		Resource nicknameCsvResource = new ClassPathResource("/nickname/names.csv");
		try (InputStream inputStream = nicknameCsvResource.getInputStream()) {
			try (Reader reader = new InputStreamReader(inputStream)) {
				myNicknameMap.load(reader);
			}
		}
	}

	public int size() {
		return myNicknameMap.size();
	}

	public List<String> getEquivalentNames(String theName) {
		List<String> retval = new ArrayList<>();
		retval.add(theName);

		List<String> expansions;
		expansions = getNicknamesFromFormalNameOrNull(theName);
		if (expansions != null) {
			retval.addAll(expansions);
		} else {
			expansions = getFormalNamesFromNicknameOrNull(theName);
			if (expansions != null) {
				retval.addAll(expansions);
			}
		}
		return retval;
	}

	List<String> getNicknamesFromFormalNameOrNull(String theName) {
		return myNicknameMap.getNicknamesFromFormalNameOrNull(theName);
	}

	List<String> getFormalNamesFromNicknameOrNull(String theNickname) {
		return myNicknameMap.getFormalNamesFromNicknameOrNull(theNickname);
	}
}

@@ -0,0 +1,201 @@
File content: the standard Apache License, Version 2.0 text, included verbatim (http://www.apache.org/licenses/LICENSE-2.0).

@@ -0,0 +1,12 @@
# nickname-and-diminutive-names-lookup

A simple CSV file containing US given names (first names) and their associated nicknames or diminutive names.

This lookup file was initially created by mining this
<a href="http://www.caagri.org/nicknames.html">genealogy page</a>. Because the lookup originates from a dataset used for genealogy purposes, it contains old names that aren't commonly used these days as well as recent ones; examples are "gregory"/"greg" and "geoffrey"/"geoff". There was also a significant effort to make the data machine readable: entries are comma-separated, and human shorthand conventions were removed, so that "rickie(y)", for example, becomes the two separate names "rickie" and "ricky".

There are Java, Perl, Python, and R parsers provided for convenience.

This is a relatively large list with roughly 1600 names. Any help cleaning up this list or adding to it is greatly appreciated.

This project was created by <a href="http://www.odu.edu/">Old Dominion University</a> - <a href="http://ws-dl.blogspot.com/">Web Science and Digital Libraries Research Group</a>. More information about the creation of this lookup can be found <a href="https://ws-dl.blogspot.com/2010/08/lookup-for-nicknames-and-diminutive.html">here</a>.

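To make the comma-separated layout described above concrete: the first column is a formal given name and the remaining columns are its nicknames (this is how NicknameMap.load above parses each row). The first row below also appears in the NicknameMapTest data later in this diff; the other two rows are invented here purely to illustrate the "rickie"/"ricky" split, and their "rick" nickname is an assumption rather than a value from the real file:

	kenneth,ken,kenny,kendrick
	rickie,rick
	ricky,rick
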
@@ -0,0 +1,2 @@
The files in this folder were cloned from https://github.com/carltonnorthern/nickname-and-diminutive-names-lookup

File diff suppressed because it is too large

@ -0,0 +1,59 @@
|
|||
package ca.uhn.fhir.jpa.searchparam.nickname;
|
||||
|
||||
import ca.uhn.fhir.jpa.searchparam.SearchParameterMap;
|
||||
import ca.uhn.fhir.rest.param.StringParam;
|
||||
import org.junit.jupiter.api.Test;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
class NicknameInterceptorTest {
|
||||
@Test
|
||||
public void testExpandForward() throws IOException {
|
||||
// setup
|
||||
String formalName = "kenneth";
|
||||
SearchParameterMap sp = new SearchParameterMap();
|
||||
sp.add("name", new StringParam(formalName).setNicknameExpand(true));
|
||||
NicknameInterceptor svc = new NicknameInterceptor();
|
||||
|
||||
// execute
|
||||
svc.expandNicknames(sp);
|
||||
|
||||
// verify
|
||||
String newSearch = sp.toNormalizedQueryString(null);
|
||||
assertEquals("?name=ken,kendrick,kenneth,kenny", newSearch);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExpandBackward() throws IOException {
|
||||
// setup
|
||||
String nickname = "ken";
|
||||
SearchParameterMap sp = new SearchParameterMap();
|
||||
sp.add("name", new StringParam(nickname).setNicknameExpand(true));
|
||||
NicknameInterceptor svc = new NicknameInterceptor();
|
||||
|
||||
// execute
|
||||
svc.expandNicknames(sp);
|
||||
|
||||
// verify
|
||||
String newSearch = sp.toNormalizedQueryString(null);
|
||||
assertEquals("?name=ken,kendall,kendrick,kendrik,kenneth,kenny,kent,mckenna", newSearch);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNothingToExpand() throws IOException {
|
||||
// setup
|
||||
String unusualName = "X Æ A-12";
|
||||
SearchParameterMap sp = new SearchParameterMap();
|
||||
sp.add("name", new StringParam(unusualName).setNicknameExpand(true));
|
||||
NicknameInterceptor svc = new NicknameInterceptor();
|
||||
|
||||
// execute
|
||||
svc.expandNicknames(sp);
|
||||
|
||||
// verify
|
||||
String newSearch = sp.toNormalizedQueryString(null);
|
||||
assertEquals("?name=x%20%C3%A6%20a-12", newSearch);
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,30 @@
package ca.uhn.fhir.jpa.searchparam.nickname;

import org.junit.jupiter.api.Test;

import java.io.IOException;
import java.io.StringReader;

import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.junit.jupiter.api.Assertions.assertEquals;

class NicknameMapTest {
	@Test
	public void testLoad() throws IOException {
		String testData = """
			kendall,ken,kenny
			kendra,kenj,kenji,kay,kenny
			kendrick,ken,kenny
			kendrik,ken,kenny
			kenneth,ken,kenny,kendrick
			kenny,ken,kenneth
			kent,ken,kenny,kendrick
			""";
		NicknameMap map = new NicknameMap();
		map.load(new StringReader(testData));
		assertEquals(7, map.size());
		assertThat(map.getNicknamesFromFormalNameOrNull("kenneth"), containsInAnyOrder("ken", "kenny", "kendrick"));
		assertThat(map.getFormalNamesFromNicknameOrNull("ken"), containsInAnyOrder("kendall", "kendrick", "kendrik", "kenneth", "kenny", "kent"));
	}
}

@@ -0,0 +1,15 @@
package ca.uhn.fhir.jpa.searchparam.nickname;

import org.junit.jupiter.api.Test;

import java.io.IOException;

import static org.junit.jupiter.api.Assertions.assertEquals;

class NicknameSvcTest {
	@Test
	public void testReadfile() throws IOException {
		NicknameSvc nicknameSvc = new NicknameSvc();
		assertEquals(1082, nicknameSvc.size());
	}
}

@@ -19,6 +19,7 @@ import org.springframework.web.util.UriComponentsBuilder;

import javax.annotation.Nonnull;
import java.util.List;
+import java.util.stream.Collectors;

/**
 * Simplistic implementation of FHIR queries.

@@ -51,11 +52,13 @@ public class TestDaoSearch {
		return result.getAllResources();
	}

-	public List<String> searchForIds(String theQueryUrl) {
+	public List<String> searchForIds(String theQueryUrl) {
		// fake out the server url parsing
		IBundleProvider result = searchForBundleProvider(theQueryUrl);

-		List<String> resourceIds = result.getAllResourceIds();
+		// getAllResources is not safe as size is not always set
+		List<String> resourceIds = result.getResources(0, Integer.MAX_VALUE)
+			.stream().map(resource -> resource.getIdElement().getIdPart()).collect(Collectors.toList());
		return resourceIds;
	}

@@ -12,6 +12,7 @@ import ca.uhn.fhir.jpa.test.BaseJpaR4Test;
import org.hl7.fhir.instance.model.api.IIdType;
import org.hl7.fhir.r4.model.CanonicalType;
+import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.Enumerations.ConceptMapEquivalence;
import org.hl7.fhir.r4.model.Enumerations.PublicationStatus;

@@ -30,6 +31,7 @@ import org.springframework.transaction.support.TransactionTemplate;

import javax.annotation.Nonnull;
import java.io.IOException;
+import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;

@@ -1139,7 +1141,9 @@ public class FhirResourceDaoR4ConceptMapTest extends BaseJpaR4Test {

		});

-		List<TranslateConceptResult> translationResults = myValidationSupport.translateConcept(new IValidationSupport.TranslateCodeRequest("http://source", "source1", "http://target")).getResults();
+		CodeableConcept sourceCodeableConcept = new CodeableConcept();
+		sourceCodeableConcept.addCoding(new Coding("http://source", "source1", null));
+		List<TranslateConceptResult> translationResults = myValidationSupport.translateConcept(new IValidationSupport.TranslateCodeRequest(Collections.unmodifiableList(sourceCodeableConcept.getCoding()), "http://target")).getResults();
		assertThat(translationResults.toString(), translationResults, hasItem(
			new TranslateConceptResult()
				.setSystem("http://target")

@ -30,6 +30,7 @@ import ca.uhn.fhir.jpa.test.config.TestR4Config;
|
|||
import ca.uhn.fhir.parser.DataFormatException;
|
||||
import ca.uhn.fhir.parser.IParser;
|
||||
import ca.uhn.fhir.rest.api.Constants;
|
||||
import ca.uhn.fhir.rest.api.SearchTotalModeEnum;
|
||||
import ca.uhn.fhir.rest.api.server.IBundleProvider;
|
||||
import ca.uhn.fhir.rest.api.server.storage.ResourcePersistentId;
|
||||
import ca.uhn.fhir.rest.param.ReferenceParam;
|
||||
|
@ -45,6 +46,7 @@ import ca.uhn.fhir.test.utilities.LogbackLevelOverrideExtension;
|
|||
import ca.uhn.fhir.test.utilities.docker.RequiresDocker;
|
||||
import ca.uhn.fhir.validation.FhirValidator;
|
||||
import ca.uhn.fhir.validation.ValidationResult;
|
||||
import com.google.common.collect.Lists;
|
||||
import org.hamcrest.Matchers;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
|
@ -71,6 +73,8 @@ import org.junit.jupiter.api.Nested;
|
|||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.junit.jupiter.api.extension.RegisterExtension;
|
||||
import org.junit.jupiter.params.ParameterizedTest;
|
||||
import org.junit.jupiter.params.provider.EnumSource;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.test.annotation.DirtiesContext;
|
||||
|
@ -84,8 +88,11 @@ import org.springframework.transaction.PlatformTransactionManager;
|
|||
|
||||
import javax.persistence.EntityManager;
|
||||
import java.io.IOException;
|
||||
import java.time.Month;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
import java.util.function.Consumer;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
import static ca.uhn.fhir.jpa.model.util.UcumServiceUtil.UCUM_CODESYSTEM_URL;
|
||||
|
@ -99,6 +106,7 @@ import static org.hamcrest.Matchers.hasSize;
|
|||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.stringContainsInOrder;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertFalse;
|
||||
import static org.junit.jupiter.api.Assertions.assertThrows;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
|
@ -1472,9 +1480,9 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
}
|
||||
|
||||
private IIdType withObservationWithQuantity(double theValue, String theSystem, String theCode) {
|
||||
myResourceId = myTestDataBuilder.createObservation(
|
||||
myResourceId = myTestDataBuilder.createObservation(asArray(
|
||||
myTestDataBuilder.withQuantityAtPath("valueQuantity", theValue, theSystem, theCode)
|
||||
);
|
||||
));
|
||||
return myResourceId;
|
||||
}
|
||||
|
||||
|
@ -1490,6 +1498,87 @@ public class FhirResourceDaoR4SearchWithElasticSearchIT extends BaseJpaTest {
|
|||
}
|
||||
|
||||
|
||||
|
||||
@Nested
|
||||
public class TotalParameter {
|
||||
|
||||
@ParameterizedTest
|
||||
@EnumSource(SearchTotalModeEnum.class)
|
||||
public void totalParamSkipsSql(SearchTotalModeEnum theTotalModeEnum) {
|
||||
myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "theCode")));
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
myTestDaoSearch.searchForIds("Observation?code=theCode&_total=" + theTotalModeEnum);
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(1, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size(), "bundle was built with no sql");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void totalIsCorrect() {
|
||||
myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-1")));
|
||||
myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-2")));
|
||||
myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-3")));
|
||||
|
||||
IBundleProvider resultBundle = myTestDaoSearch.searchForBundleProvider("Observation?_total=" + SearchTotalModeEnum.ACCURATE);
|
||||
assertEquals(3, resultBundle.size());
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
@Nested
|
||||
public class OffsetParameter {
|
||||
|
||||
@BeforeEach
|
||||
public void enableResourceStorage() {
|
||||
myDaoConfig.setStoreResourceInLuceneIndex(true);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void offsetNoCount() {
|
||||
myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-1")));
|
||||
IIdType idCode2 = myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-2")));
|
||||
IIdType idCode3 = myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-3")));
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> resultIds = myTestDaoSearch.searchForIds("Observation?code=code-1,code-2,code-3&_offset=1");
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
|
||||
assertThat(resultIds, containsInAnyOrder(idCode2.getIdPart(), idCode3.getIdPart()));
|
||||
// make also sure no extra SQL queries were executed
|
||||
assertEquals(0, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size(), "bundle was built with no sql");
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void offsetAndCount() {
|
||||
myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-1")));
|
||||
IIdType idCode2 = myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-2")));
|
||||
myTestDataBuilder.createObservation(asArray(myTestDataBuilder.withObservationCode("http://example.com/", "code-3")));
|
||||
|
||||
myCaptureQueriesListener.clear();
|
||||
List<String> resultIds = myTestDaoSearch.searchForIds("Observation?code=code-1,code-2,code-3&_offset=1&_count=1");
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
|
||||
assertThat(resultIds, containsInAnyOrder(idCode2.getIdPart()));
|
||||
// also validate no extra SQL queries were executed
|
||||
assertEquals(0, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size(), "bundle was built with no sql");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private Consumer<IBaseResource>[] asArray(Consumer<IBaseResource> theIBaseResourceConsumer) {
|
||||
@SuppressWarnings("unchecked")
|
||||
Consumer<IBaseResource>[] array = (Consumer<IBaseResource>[]) new Consumer[]{theIBaseResourceConsumer};
|
||||
return array;
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
/**
|
||||
* Disallow context dirtying for nested classes
|
||||
*/
|
||||
|
|
|
@ -0,0 +1,57 @@
|
|||
package ca.uhn.fhir.jpa.provider.r4;
|
||||
|
||||
import ca.uhn.fhir.jpa.searchparam.nickname.NicknameInterceptor;
|
||||
import ca.uhn.fhir.util.BundleUtil;
|
||||
import org.hl7.fhir.r4.model.Bundle;
|
||||
import org.hl7.fhir.r4.model.Patient;
|
||||
import org.junit.jupiter.api.AfterEach;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.hasSize;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
||||
public class NicknameSearchR4Test extends BaseResourceProviderR4Test {
|
||||
@Autowired
|
||||
NicknameInterceptor myNicknameInterceptor;
|
||||
|
||||
@Override
|
||||
@AfterEach
|
||||
public void after() throws Exception {
|
||||
super.after();
|
||||
|
||||
myInterceptorRegistry.unregisterInterceptor(myNicknameInterceptor);
|
||||
}
|
||||
|
||||
@BeforeEach
|
||||
@Override
|
||||
public void before() throws Exception {
|
||||
super.before();
|
||||
myInterceptorRegistry.registerInterceptor(myNicknameInterceptor);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testExpandNickname() {
|
||||
Patient patient1 = new Patient();
|
||||
patient1.getNameFirstRep().addGiven("ken");
|
||||
myClient.create().resource(patient1).execute();
|
||||
|
||||
Patient patient2 = new Patient();
|
||||
patient2.getNameFirstRep().addGiven("bob");
|
||||
myClient.create().resource(patient2).execute();
|
||||
|
||||
Bundle result = myClient
|
||||
.loadPage()
|
||||
.byUrl(ourServerBase + "/Patient?name:nickname=kenneth")
|
||||
.andReturnBundle(Bundle.class)
|
||||
.execute();
|
||||
|
||||
List<Patient> resources = BundleUtil.toListOfResourcesOfType(myFhirContext,result, Patient.class);
|
||||
assertThat(resources, hasSize(1));
|
||||
assertEquals("ken", resources.get(0).getNameFirstRep().getGivenAsSingleString());
|
||||
}
|
||||
}
|
|
@ -36,6 +36,7 @@ import java.time.Instant;
|
|||
import java.util.Date;
|
||||
import java.util.List;
|
||||
import java.util.Objects;
|
||||
import java.util.stream.IntStream;
|
||||
|
||||
import static org.hamcrest.MatcherAssert.assertThat;
|
||||
import static org.hamcrest.Matchers.hasItem;
|
||||
|
@ -43,6 +44,7 @@ import static org.hamcrest.Matchers.is;
|
|||
import static org.hamcrest.Matchers.not;
|
||||
import static org.hamcrest.Matchers.nullValue;
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
import static org.junit.jupiter.api.Assertions.assertNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
|
||||
@ExtendWith(SpringExtension.class)
|
||||
|
@ -148,7 +150,7 @@ public class ResourceProviderR4ElasticTest extends BaseResourceProviderR4Test {
|
|||
.useHttpGet()
|
||||
.execute();
|
||||
|
||||
assertEquals( 1, respParam.getParameter().size(), "Expected only 1 observation for blood count code");
|
||||
assertEquals(1, respParam.getParameter().size(), "Expected only 1 observation for blood count code");
|
||||
Bundle bundle = (Bundle) respParam.getParameter().get(0).getResource();
|
||||
Observation observation = (Observation) bundle.getEntryFirstRep().getResource();
|
||||
|
||||
|
@ -157,4 +159,47 @@ public class ResourceProviderR4ElasticTest extends BaseResourceProviderR4Test {
|
|||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCountReturnsExpectedSizeOfResources() throws IOException {
|
||||
IntStream.range(0, 10).forEach(index -> {
|
||||
Coding blood_count = new Coding("http://loinc.org", "789-8", "Erythrocytes in Blood by Automated count for code: " + (index + 1));
|
||||
createObservationWithCode(blood_count);
|
||||
});
|
||||
HttpGet countQuery = new HttpGet(ourServerBase + "/Observation?code=789-8&_count=5");
|
||||
myCaptureQueriesListener.clear();
|
||||
try (CloseableHttpResponse response = ourHttpClient.execute(countQuery)) {
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
// then
|
||||
assertEquals(Constants.STATUS_HTTP_200_OK, response.getStatusLine().getStatusCode());
|
||||
String text = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
Bundle bundle = myFhirContext.newXmlParser().parseResource(Bundle.class, text);
|
||||
assertEquals(10, bundle.getTotal(), "Expected total 10 observations matching query");
|
||||
assertEquals(5, bundle.getEntry().size(), "Expected 5 observation entries to match page size");
|
||||
assertTrue(bundle.getLink("next").hasRelation());
|
||||
assertEquals(0, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size(), "we build the bundle with no sql");
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCountZeroReturnsNoResourceEntries() throws IOException {
|
||||
IntStream.range(0, 10).forEach(index -> {
|
||||
Coding blood_count = new Coding("http://loinc.org", "789-8", "Erythrocytes in Blood by Automated count for code: " + (index + 1));
|
||||
createObservationWithCode(blood_count);
|
||||
});
|
||||
HttpGet countQuery = new HttpGet(ourServerBase + "/Observation?code=789-8&_count=0");
|
||||
myCaptureQueriesListener.clear();
|
||||
try (CloseableHttpResponse response = ourHttpClient.execute(countQuery)) {
|
||||
myCaptureQueriesListener.logSelectQueriesForCurrentThread();
|
||||
assertEquals(Constants.STATUS_HTTP_200_OK, response.getStatusLine().getStatusCode());
|
||||
String text = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
|
||||
Bundle bundle = myFhirContext.newXmlParser().parseResource(Bundle.class, text);
|
||||
assertEquals(10, bundle.getTotal(), "Expected total 10 observations matching query");
|
||||
assertEquals(0, bundle.getEntry().size(), "Expected no entries in bundle");
|
||||
assertNull(bundle.getLink("next"), "Expected no 'next' link");
|
||||
assertNull(bundle.getLink("prev"), "Expected no 'prev' link");
|
||||
assertEquals(0, myCaptureQueriesListener.getSelectQueriesForCurrentThread().size(), "we build the bundle with no sql");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@@ -561,7 +561,7 @@ public class SearchCoordinatorSvcImplTest {
		params.setSearchTotalMode(SearchTotalModeEnum.ACCURATE);

		List<ResourcePersistentId> pids = createPidSequence(30);
-		when(mySearchBuilder.createCountQuery(same(params), any(String.class), nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(Lists.newArrayList(Long.valueOf(20L)).iterator());
+		when(mySearchBuilder.createCountQuery(same(params), any(String.class),nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(20L);
		when(mySearchBuilder.createQuery(same(params), any(), nullable(RequestDetails.class), nullable(RequestPartitionId.class))).thenReturn(new ResultIterator(pids.subList(10, 20).iterator()));

		doAnswer(loadPids()).when(mySearchBuilder).loadResourcesByPid(any(Collection.class), any(Collection.class), any(List.class), anyBoolean(), any());

@ -1,5 +1,6 @@
|
|||
package ca.uhn.fhir.jpa.term;
|
||||
|
||||
import ca.uhn.fhir.context.support.IValidationSupport;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResult;
|
||||
import ca.uhn.fhir.context.support.TranslateConceptResults;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
|
@ -8,14 +9,19 @@ import ca.uhn.fhir.jpa.entity.TermConceptMap;
|
|||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroup;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElement;
|
||||
import ca.uhn.fhir.jpa.entity.TermConceptMapGroupElementTarget;
|
||||
import ca.uhn.fhir.jpa.term.api.ITermConceptMappingSvc;
|
||||
import ca.uhn.fhir.rest.server.exceptions.UnprocessableEntityException;
|
||||
import org.hl7.fhir.instance.model.api.IIdType;
|
||||
import org.hl7.fhir.r4.model.CanonicalType;
|
||||
import org.hl7.fhir.r4.model.CodeableConcept;
|
||||
import org.hl7.fhir.r4.model.Coding;
|
||||
import org.hl7.fhir.r4.model.ConceptMap;
|
||||
import org.hl7.fhir.r4.model.Enumerations;
|
||||
import org.hl7.fhir.r4.model.StringType;
|
||||
import org.hl7.fhir.r4.model.UriType;
|
||||
import org.hl7.fhir.r4.model.codesystems.HttpVerb;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.mockito.ArgumentCaptor;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
|
@ -25,6 +31,7 @@ import org.springframework.transaction.support.TransactionCallbackWithoutResult;
|
|||
import org.springframework.transaction.support.TransactionTemplate;
|
||||
|
||||
import javax.annotation.Nonnull;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.assertEquals;
|
||||
|
@ -32,6 +39,11 @@ import static org.junit.jupiter.api.Assertions.assertFalse;
|
|||
import static org.junit.jupiter.api.Assertions.assertNotNull;
|
||||
import static org.junit.jupiter.api.Assertions.assertTrue;
|
||||
import static org.junit.jupiter.api.Assertions.fail;
|
||||
import static org.mockito.Mockito.doReturn;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.spy;
|
||||
import static org.mockito.Mockito.verify;
|
||||
import static org.mockito.Mockito.when;
|
||||
|
||||
public class TermConceptMappingSvcImplTest extends BaseTermR4Test {
|
||||
private static final Logger ourLog = LoggerFactory.getLogger(TermConceptMappingSvcImplTest.class);
|
||||
|
@ -1547,6 +1559,100 @@ public class TermConceptMappingSvcImplTest extends BaseTermR4Test {
|
|||
});
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTranslateCodeRequestToTranslationRequestMapping() {
|
||||
CodeableConcept codeableConcept = new CodeableConcept();
|
||||
Coding coding = new Coding("theSourceSystemUrl", "theSourceCode", null);
|
||||
codeableConcept.addCoding(coding);
|
||||
|
||||
IValidationSupport.TranslateCodeRequest theRequest = new IValidationSupport.TranslateCodeRequest(
|
||||
Collections.unmodifiableList(codeableConcept.getCoding()),
|
||||
"theTargetSystemUrl",
|
||||
"theConceptMapUrl",
|
||||
"theConceptMapVersion",
|
||||
"theSourceValueSetUrl",
|
||||
"theTargetValueSetUrl",
|
||||
0L,
|
||||
false
|
||||
);
|
||||
|
||||
CodeableConcept sourceCodeableConcept = new CodeableConcept();
|
||||
sourceCodeableConcept
|
||||
.addCoding()
|
||||
.setSystem(coding.getSystem())
|
||||
.setCode(coding.getCode());
|
||||
|
||||
TranslationRequest expected = new TranslationRequest();
|
||||
expected.setCodeableConcept(sourceCodeableConcept);
|
||||
expected.setConceptMapVersion(new StringType(theRequest.getConceptMapVersion()));
|
||||
expected.setUrl(new UriType(theRequest.getConceptMapUrl()));
|
||||
expected.setSource(new UriType(theRequest.getSourceValueSetUrl()));
|
||||
expected.setTarget(new UriType(theRequest.getTargetValueSetUrl()));
|
||||
expected.setTargetSystem(new UriType(theRequest.getTargetSystemUrl()));
|
||||
expected.setResourceId(theRequest.getResourcePid());
|
||||
expected.setReverse(theRequest.isReverse());
|
||||
|
||||
ITermConceptMappingSvc mock = mock(TermConceptMappingSvcImpl.class);
|
||||
ArgumentCaptor<TranslationRequest> argument = ArgumentCaptor.forClass(TranslationRequest.class);
|
||||
when(mock.translate(expected)).thenReturn(new TranslateConceptResults());
|
||||
when(mock.translateConcept(theRequest)).thenCallRealMethod();
|
||||
mock.translateConcept(theRequest);
|
||||
verify(mock).translate(argument.capture());
|
||||
assertSameTranslationRequest(expected, argument.getValue());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTranslateCodeRequestWithReverseToTranslationRequestMapping() {
|
||||
CodeableConcept codeableConcept = new CodeableConcept();
|
||||
Coding coding = new Coding("theSourceSystemUrl", "theSourceCode", null);
|
||||
codeableConcept.addCoding(coding);
|
||||
|
||||
IValidationSupport.TranslateCodeRequest theRequest = new IValidationSupport.TranslateCodeRequest(
|
||||
Collections.unmodifiableList(codeableConcept.getCoding()),
|
||||
"theTargetSystemUrl",
|
||||
"theConceptMapUrl",
|
||||
"theConceptMapVersion",
|
||||
"theSourceValueSetUrl",
|
||||
"theTargetValueSetUrl",
|
||||
0L,
|
||||
true
|
||||
);
|
||||
|
||||
CodeableConcept sourceCodeableConcept = new CodeableConcept();
|
||||
sourceCodeableConcept
|
||||
.addCoding()
|
||||
.setSystem(coding.getSystem())
|
||||
.setCode(coding.getCode());
|
||||
|
||||
TranslationRequest expected = new TranslationRequest();
|
||||
expected.setCodeableConcept(sourceCodeableConcept);
|
||||
expected.setConceptMapVersion(new StringType(theRequest.getConceptMapVersion()));
|
||||
expected.setUrl(new UriType(theRequest.getConceptMapUrl()));
|
||||
expected.setSource(new UriType(theRequest.getSourceValueSetUrl()));
|
||||
expected.setTarget(new UriType(theRequest.getTargetValueSetUrl()));
|
||||
expected.setTargetSystem(new UriType(theRequest.getTargetSystemUrl()));
|
||||
expected.setResourceId(theRequest.getResourcePid());
|
||||
expected.setReverse(theRequest.isReverse());
|
||||
|
||||
ITermConceptMappingSvc mock = mock(TermConceptMappingSvcImpl.class);
|
||||
ArgumentCaptor<TranslationRequest> argument = ArgumentCaptor.forClass(TranslationRequest.class);
|
||||
when(mock.translate(expected)).thenReturn(new TranslateConceptResults());
|
||||
when(mock.translateConcept(theRequest)).thenCallRealMethod();
|
||||
mock.translateConcept(theRequest);
|
||||
verify(mock).translateWithReverse(argument.capture());
|
||||
assertSameTranslationRequest(expected, argument.getValue());
|
||||
}
|
||||
|
||||
private static void assertSameTranslationRequest(TranslationRequest expected, TranslationRequest actual) {
|
||||
assertTrue(expected.getCodeableConcept().equalsDeep(actual.getCodeableConcept()));
|
||||
assertEquals(expected.getConceptMapVersion().asStringValue(), actual.getConceptMapVersion().asStringValue());
|
||||
assertEquals(expected.getUrl().asStringValue(), actual.getUrl().asStringValue());
|
||||
assertEquals(expected.getSource().asStringValue(), actual.getSource().asStringValue());
|
||||
assertEquals(expected.getTarget().asStringValue(), actual.getTarget().asStringValue());
|
||||
assertEquals(expected.getTargetSystem().asStringValue(), actual.getTargetSystem().asStringValue());
|
||||
assertEquals(expected.getResourceId(), actual.getResourceId());
|
||||
assertEquals(expected.getReverseAsBoolean(), actual.getReverseAsBoolean());
|
||||
}
|
||||
|
||||
private void createAndPersistConceptMap() {
|
||||
ConceptMap conceptMap = createConceptMap();
|
||||
|
|
|
@ -20,14 +20,14 @@ package ca.uhn.fhir.mdm.rules.config;
|
|||
* #L%
|
||||
*/
|
||||
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.context.ConfigurationException;
|
||||
import ca.uhn.fhir.context.FhirContext;
|
||||
import ca.uhn.fhir.context.FhirVersionEnum;
|
||||
import ca.uhn.fhir.context.RuntimeResourceDefinition;
|
||||
import ca.uhn.fhir.fhirpath.IFhirPath;
|
||||
import ca.uhn.fhir.mdm.api.MdmConstants;
|
||||
import ca.uhn.fhir.i18n.Msg;
|
||||
import ca.uhn.fhir.mdm.api.IMdmRuleValidator;
|
||||
import ca.uhn.fhir.mdm.api.MdmConstants;
|
||||
import ca.uhn.fhir.mdm.rules.json.MdmFieldMatchJson;
|
||||
import ca.uhn.fhir.mdm.rules.json.MdmFilterSearchParamJson;
|
||||
import ca.uhn.fhir.mdm.rules.json.MdmResourceSearchParamJson;
|
||||
|
@ -36,6 +36,7 @@ import ca.uhn.fhir.mdm.rules.json.MdmSimilarityJson;
|
|||
import ca.uhn.fhir.parser.DataFormatException;
|
||||
import ca.uhn.fhir.rest.server.util.ISearchParamRegistry;
|
||||
import ca.uhn.fhir.util.FhirTerser;
|
||||
import ca.uhn.fhir.util.SearchParameterUtil;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
@@ -150,7 +151,8 @@ public class MdmRuleValidator implements IMdmRuleValidator {
	}

	private void validateResourceSearchParam(String theFieldName, String theResourceType, String theSearchParam) {
-		if (mySearchParamRetriever.getActiveSearchParam(theResourceType, theSearchParam) == null) {
+		String searchParam = SearchParameterUtil.stripModifier(theSearchParam);
+		if (mySearchParamRetriever.getActiveSearchParam(theResourceType, searchParam) == null) {
			throw new ConfigurationException(Msg.code(1511) + "Error in " + theFieldName + ": " + theResourceType + " does not have a search parameter called '" + theSearchParam + "'");
		}
	}

@@ -40,6 +40,7 @@ public enum MdmMatcherEnum {
	NYSIIS(new HapiStringMatcher(new PhoneticEncoderMatcher(PhoneticEncoderEnum.NYSIIS))),
	REFINED_SOUNDEX(new HapiStringMatcher(new PhoneticEncoderMatcher(PhoneticEncoderEnum.REFINED_SOUNDEX))),
	SOUNDEX(new HapiStringMatcher(new PhoneticEncoderMatcher(PhoneticEncoderEnum.SOUNDEX))),
+	NICKNAME(new HapiStringMatcher(new NicknameMatcher())),

	STRING(new HapiStringMatcher()),
	SUBSTRING(new HapiStringMatcher(new SubstringStringMatcher())),

@@ -0,0 +1,30 @@
package ca.uhn.fhir.mdm.rules.matcher;

import ca.uhn.fhir.context.ConfigurationException;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.jpa.searchparam.nickname.NicknameSvc;

import java.io.IOException;
import java.util.List;
import java.util.Locale;

public class NicknameMatcher implements IMdmStringMatcher {
	private final NicknameSvc myNicknameSvc;

	public NicknameMatcher() {
		try {
			myNicknameSvc = new NicknameSvc();
		} catch (IOException e) {
			throw new ConfigurationException(Msg.code(2078) + "Unable to load nicknames", e);
		}
	}

	@Override
	public boolean matches(String theLeftString, String theRightString) {
		String leftString = theLeftString.toLowerCase(Locale.ROOT);
		String rightString = theRightString.toLowerCase(Locale.ROOT);

		List<String> leftNames = myNicknameSvc.getEquivalentNames(leftString);
		return leftNames.contains(rightString);
	}
}

@@ -0,0 +1,25 @@
package ca.uhn.fhir.mdm.rules.matcher;

import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

class NicknameMatcherTest {
	IMdmStringMatcher matcher = new NicknameMatcher();

	@Test
	public void testMatches() {
		assertTrue(matcher.matches("Ken", "ken"));
		assertTrue(matcher.matches("ken", "Ken"));
		assertTrue(matcher.matches("Ken", "Ken"));
		assertTrue(matcher.matches("Kenneth", "Ken"));
		assertTrue(matcher.matches("Kenneth", "Kenny"));
		assertTrue(matcher.matches("Ken", "Kenneth"));
		assertTrue(matcher.matches("Kenny", "Kenneth"));

		assertFalse(matcher.matches("Ken", "Bob"));
		// These aren't nickname matches. If you want matches like these use a phonetic matcher
		assertFalse(matcher.matches("Allen", "Allan"));
	}
}

@ -36,9 +36,11 @@ import com.google.common.collect.ArrayListMultimap;
|
|||
import com.google.common.collect.Multimap;
|
||||
import org.apache.commons.lang3.Validate;
|
||||
import org.hl7.fhir.instance.model.api.IBase;
|
||||
import org.hl7.fhir.instance.model.api.IBaseCoding;
|
||||
import org.hl7.fhir.instance.model.api.IBaseResource;
|
||||
import org.hl7.fhir.instance.model.api.IPrimitiveType;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
@ -153,12 +155,17 @@ public class ResponseTerminologyTranslationInterceptor extends BaseResponseTermi
|
|||
if (!foundSystemsToCodes.containsKey(wantTargetSystem)) {
|
||||
|
||||
for (String code : foundSystemsToCodes.get(nextSourceSystem)) {
|
||||
TranslateConceptResults translateConceptResults = myValidationSupport.translateConcept(new IValidationSupport.TranslateCodeRequest(nextSourceSystem, code, wantTargetSystem));
|
||||
List<IBaseCoding> codings = new ArrayList<IBaseCoding>();
|
||||
codings.add(createCodingFromPrimitives(nextSourceSystem, code, null));
|
||||
TranslateConceptResults translateConceptResults = myValidationSupport.translateConcept(new IValidationSupport.TranslateCodeRequest(codings, wantTargetSystem));
|
||||
if (translateConceptResults != null) {
|
||||
List<TranslateConceptResult> mappings = translateConceptResults.getResults();
|
||||
for (TranslateConceptResult nextMapping : mappings) {
|
||||
|
||||
IBase newCoding = createCodingFromMappingTarget(nextMapping);
|
||||
IBase newCoding = createCodingFromPrimitives(
|
||||
nextMapping.getSystem(),
|
||||
nextMapping.getCode(),
|
||||
nextMapping.getDisplay());
|
||||
|
||||
// Add coding to existing CodeableConcept
|
||||
myCodeableConceptCodingChild.getMutator().addValue(theElement, newCoding);
|
||||
|
@ -174,14 +181,14 @@ public class ResponseTerminologyTranslationInterceptor extends BaseResponseTermi
|
|||
|
||||
}
|
||||
|
||||
private IBase createCodingFromMappingTarget(TranslateConceptResult nextMapping) {
|
||||
IBase newCoding = myCodingDefinitition.newInstance();
|
||||
IPrimitiveType<?> newSystem = myUriDefinition.newInstance(nextMapping.getSystem());
|
||||
private IBaseCoding createCodingFromPrimitives(String system, String code, String display) {
|
||||
IBaseCoding newCoding = (IBaseCoding) myCodingDefinitition.newInstance();
|
||||
IPrimitiveType<?> newSystem = myUriDefinition.newInstance(system);
|
||||
myCodingSystemChild.getMutator().addValue(newCoding, newSystem);
|
||||
IPrimitiveType<?> newCode = myCodeDefinition.newInstance(nextMapping.getCode());
|
||||
IPrimitiveType<?> newCode = myCodeDefinition.newInstance(code);
|
||||
myCodingCodeChild.getMutator().addValue(newCoding, newCode);
|
||||
if (isNotBlank(nextMapping.getDisplay())) {
|
||||
IPrimitiveType<?> newDisplay = myStringDefinition.newInstance(nextMapping.getDisplay());
|
||||
if (isNotBlank(display)) {
|
||||
IPrimitiveType<?> newDisplay = myStringDefinition.newInstance(display);
|
||||
myCodingDisplayChild.getMutator().addValue(newCoding, newDisplay);
|
||||
}
|
||||
return newCoding;
|
||||
|
|
|
@@ -48,11 +48,24 @@ public class RuleTarget
for (Map.Entry<String, String[]> entry : theParameters.entrySet()) {
String key = entry.getKey();
String[] value = entry.getValue();
if (key.endsWith(Constants.PARAMQUALIFIER_MDM)) {
key = key.split(Constants.PARAMQUALIFIER_MDM)[0];
}
key = stripMdmQualifier(key);
key = stripNicknameQualifier(key);
retval.put(key, value);
}
return retval;
}

private String stripMdmQualifier(String theKey) {
if (theKey.endsWith(Constants.PARAMQUALIFIER_MDM)) {
theKey = theKey.split(Constants.PARAMQUALIFIER_MDM)[0];
}
return theKey;
}

private String stripNicknameQualifier(String theKey) {
if (theKey.endsWith(Constants.PARAMQUALIFIER_NICKNAME)) {
theKey = theKey.split(Constants.PARAMQUALIFIER_NICKNAME)[0];
}
return theKey;
}
}

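Illustrative sketch (not part of the committed change): a standalone view of the qualifier-stripping behaviour above. The ":nickname" literal (and ":mdm" for the MDM case) is an assumption standing in for the Constants.PARAMQUALIFIER_* values the real code reads.

public class QualifierStrippingSketch {
	public static void main(String[] args) {
		String qualifier = ":nickname";   // assumed value of Constants.PARAMQUALIFIER_NICKNAME
		String key = "given:nickname";    // hypothetical incoming search parameter name
		if (key.endsWith(qualifier)) {
			key = key.split(qualifier)[0];
		}
		System.out.println(key); // prints "given"
	}
}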
@@ -162,6 +162,8 @@ public abstract class BaseTask

private int doExecuteSql(@Language("SQL") String theSql, Object[] theArguments) {
JdbcTemplate jdbcTemplate = getConnectionProperties().newJdbcTemplate();
// 0 means no timeout -- we use this for index rebuilds that may take time.
jdbcTemplate.setQueryTimeout(0);
try {
int changesCount = jdbcTemplate.update(theSql, theArguments);
if (!"true".equals(System.getProperty("unit_test_mode"))) {

@@ -20,7 +20,9 @@ package ca.uhn.fhir.jpa.api.model;
* #L%
*/

import ca.uhn.fhir.context.support.IValidationSupport;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;

@@ -28,6 +30,7 @@ import org.hl7.fhir.r4.model.StringType;
import org.hl7.fhir.r4.model.UriType;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class TranslationRequest {

@@ -208,4 +211,40 @@ public class TranslationRequest {
public boolean hasTargetSystem() {
return myTargetSystem != null && myTargetSystem.hasValue();
}

public IValidationSupport.TranslateCodeRequest asTranslateCodeRequest() {
return new IValidationSupport.TranslateCodeRequest(
Collections.unmodifiableList(this.getCodeableConcept().getCoding()),
this.getTargetSystem() != null ? this.getTargetSystem().asStringValue() : null,
this.getUrl() != null ? this.getUrl().asStringValue() : null,
this.getConceptMapVersion() != null ? this.getConceptMapVersion().asStringValue() : null,
this.getSource() != null ? this.getSource().asStringValue() : null,
this.getTarget() != null ? this.getTarget().asStringValue() : null,
this.getResourceId(),
this.getReverseAsBoolean()
);
}

public static TranslationRequest fromTranslateCodeRequest(IValidationSupport.TranslateCodeRequest theRequest) {
CodeableConcept sourceCodeableConcept = new CodeableConcept();
for (IBaseCoding aCoding : theRequest.getCodings()) {
sourceCodeableConcept
.addCoding()
.setSystem(aCoding.getSystem())
.setCode(aCoding.getCode())
.setVersion(((Coding) aCoding).getVersion());
}

TranslationRequest translationRequest = new TranslationRequest();
translationRequest.setCodeableConcept(sourceCodeableConcept);
translationRequest.setConceptMapVersion(new StringType(theRequest.getConceptMapVersion()));
translationRequest.setUrl(new UriType(theRequest.getConceptMapUrl()));
translationRequest.setSource(new UriType(theRequest.getSourceValueSetUrl()));
translationRequest.setTarget(new UriType(theRequest.getTargetValueSetUrl()));
translationRequest.setTargetSystem(new UriType(theRequest.getTargetSystemUrl()));
translationRequest.setResourceId(theRequest.getResourcePid());
translationRequest.setReverse(theRequest.isReverse());
return translationRequest;
}
}

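Illustrative sketch (not part of the committed change): a short round trip pairing asTranslateCodeRequest() with fromTranslateCodeRequest(). The sketch class name and the example.org system URLs and code are placeholders.

import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.jpa.api.model.TranslationRequest;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.UriType;

public class TranslationRequestRoundTripSketch {
	static void roundTrip() {
		TranslationRequest original = new TranslationRequest();
		original.setCodeableConcept(new CodeableConcept()
			.addCoding(new Coding("http://example.org/source-cs", "1234", null))); // hypothetical coding
		original.setTargetSystem(new UriType("http://example.org/target-cs"));      // hypothetical target system

		// JPA-layer request -> version-independent request consumed by IValidationSupport ...
		IValidationSupport.TranslateCodeRequest portable = original.asTranslateCodeRequest();

		// ... and back again, reconstructing an equivalent JPA-layer request.
		TranslationRequest restored = TranslationRequest.fromTranslateCodeRequest(portable);
		System.out.println(restored.hasTargetSystem()); // true
	}
}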
@@ -2,12 +2,14 @@ package ca.uhn.fhir.rest.param;

import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.rest.api.Constants;
import ca.uhn.fhir.rest.server.exceptions.InvalidRequestException;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;

public class TokenParamTest {
private static final FhirContext ourCtx = FhirContext.forR4Cached();

@@ -48,4 +50,32 @@ public class TokenParamTest {
assertEquals("type-value|identifier-value", param.getValue());
}

@Test
public void testNameNickname() {
StringParam param = new StringParam();
assertFalse(param.isNicknameExpand());
param.setValueAsQueryToken(ourCtx, "name", Constants.PARAMQUALIFIER_NICKNAME, "kenny");
assertTrue(param.isNicknameExpand());
}

@Test
public void testGivenNickname() {
StringParam param = new StringParam();
assertFalse(param.isNicknameExpand());
param.setValueAsQueryToken(ourCtx, "given", Constants.PARAMQUALIFIER_NICKNAME, "kenny");
assertTrue(param.isNicknameExpand());
}

@Test
public void testInvalidNickname() {
StringParam param = new StringParam();
assertFalse(param.isNicknameExpand());
try {
param.setValueAsQueryToken(ourCtx, "family", Constants.PARAMQUALIFIER_NICKNAME, "kenny");
fail();
} catch (InvalidRequestException e) {
assertEquals("HAPI-2077: Modifier :nickname may only be used with 'name' and 'given' search parameters", e.getMessage());
}
}

}

@@ -1,5 +1,11 @@
package org.hl7.fhir.common.hapi.validation.support;

import ca.uhn.fhir.context.BaseRuntimeChildDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementCompositeDefinition;
import ca.uhn.fhir.context.BaseRuntimeElementDefinition;
import ca.uhn.fhir.context.RuntimePrimitiveDatatypeDefinition;
import ca.uhn.fhir.context.support.TranslateConceptResult;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.i18n.Msg;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.FhirVersionEnum;

@@ -11,20 +17,29 @@ import ca.uhn.fhir.rest.client.api.IGenericClient;
import ca.uhn.fhir.util.BundleUtil;
import ca.uhn.fhir.util.JsonUtil;
import ca.uhn.fhir.util.ParametersUtil;
import ca.uhn.fhir.util.StringUtil;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.hl7.fhir.instance.model.api.IBase;
import org.hl7.fhir.instance.model.api.IBaseBundle;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseParameters;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.instance.model.api.IPrimitiveType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.ValueSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.annotation.Nonnull;
import java.io.IOException;
import java.sql.Array;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Optional;

import static org.apache.commons.lang3.StringUtils.isBlank;
import static org.apache.commons.lang3.StringUtils.isNotBlank;

@@ -331,6 +346,23 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup
return fetchValueSet(theValueSetUrl) != null;
}

@Override
public TranslateConceptResults translateConcept(TranslateCodeRequest theRequest) {
IGenericClient client = provideClient();
FhirContext fhirContext = client.getFhirContext();

IBaseParameters params = buildTranslateInputParameters(fhirContext, theRequest);

IBaseParameters outcome = client
.operation()
.onType("ConceptMap")
.named("$translate")
.withParameters(params)
.execute();

return translateOutcomeToResults(fhirContext, outcome);
}

private IGenericClient provideClient() {
IGenericClient retVal = myCtx.newRestfulGenericClient(myBaseUrl);
for (Object next : myClientInterceptors) {

@@ -442,4 +474,103 @@ public class RemoteTerminologyServiceValidationSupport extends BaseValidationSup
myClientInterceptors.add(theClientInterceptor);
}

private IBaseParameters buildTranslateInputParameters(FhirContext fhirContext, TranslateCodeRequest theRequest) {
IBaseParameters params = ParametersUtil.newInstance(fhirContext);
if (!StringUtils.isEmpty(theRequest.getConceptMapUrl())) {
ParametersUtil.addParameterToParametersUri(fhirContext, params, "url", theRequest.getConceptMapUrl());
}
if (!StringUtils.isEmpty(theRequest.getConceptMapVersion())) {
ParametersUtil.addParameterToParametersString(fhirContext, params, "conceptMapVersion", theRequest.getConceptMapVersion());
}
if (theRequest.getCodings() != null) {
addCodingsToTranslateParameters(fhirContext, theRequest.getCodings(), params);
}
if (!StringUtils.isEmpty(theRequest.getSourceValueSetUrl())) {
ParametersUtil.addParameterToParametersUri(fhirContext, params, "source", theRequest.getSourceValueSetUrl());
}
if (!StringUtils.isEmpty(theRequest.getTargetValueSetUrl())) {
ParametersUtil.addParameterToParametersUri(fhirContext, params, "target", theRequest.getTargetValueSetUrl());
}
if (!StringUtils.isEmpty(theRequest.getTargetSystemUrl())) {
ParametersUtil.addParameterToParametersUri(fhirContext, params, "targetsystem", theRequest.getTargetSystemUrl());
}
if (theRequest.isReverse()) {
ParametersUtil.addParameterToParametersBoolean(fhirContext, params, "reverse", theRequest.isReverse());
}

return params;
}

private void addCodingsToTranslateParameters(FhirContext fhirContext, List<IBaseCoding> theCodings, IBaseParameters theParams) {
BaseRuntimeElementCompositeDefinition<?> codeableConceptDef = (BaseRuntimeElementCompositeDefinition<?>) Objects.requireNonNull(fhirContext.getElementDefinition("CodeableConcept"));
BaseRuntimeChildDefinition codings = codeableConceptDef.getChildByName("coding");
BaseRuntimeElementCompositeDefinition<?> codingDef = (BaseRuntimeElementCompositeDefinition<?>) Objects.requireNonNull(fhirContext.getElementDefinition("Coding"));
BaseRuntimeChildDefinition codingSystemChild = codingDef.getChildByName("system");
BaseRuntimeChildDefinition codingCodeChild = codingDef.getChildByName("code");
BaseRuntimeElementDefinition<IPrimitiveType<?>> systemDef = (RuntimePrimitiveDatatypeDefinition) fhirContext.getElementDefinition("uri");
BaseRuntimeElementDefinition<IPrimitiveType<?>> codeDef = (RuntimePrimitiveDatatypeDefinition) fhirContext.getElementDefinition("code");

IBase codeableConcept = codeableConceptDef.newInstance();

for (IBaseCoding aCoding : theCodings) {
IBaseCoding newCoding = (IBaseCoding) codingDef.newInstance();

IPrimitiveType<?> newSystem = systemDef.newInstance(aCoding.getSystem());
codingSystemChild.getMutator().addValue(newCoding, newSystem);
IPrimitiveType<?> newCode = codeDef.newInstance(aCoding.getCode());
codingCodeChild.getMutator().addValue(newCoding, newCode);

codings.getMutator().addValue(codeableConcept, newCoding);
}

ParametersUtil.addParameterToParameters(fhirContext, theParams, "codeableConcept", codeableConcept);
}

private TranslateConceptResults translateOutcomeToResults(FhirContext fhirContext, IBaseParameters outcome) {
Optional<String> result = ParametersUtil.getNamedParameterValueAsString(fhirContext, outcome, "result");
Optional<String> message = ParametersUtil.getNamedParameterValueAsString(fhirContext, outcome, "message");
List<IBase> matches = ParametersUtil.getNamedParameters(fhirContext, outcome, "match");

TranslateConceptResults retVal = new TranslateConceptResults();
if (result.isPresent()) {
retVal.setResult(Boolean.parseBoolean(result.get()));
}
if (message.isPresent()) {
retVal.setMessage(message.get());
}
if (!matches.isEmpty()) {
retVal.setResults(matchesToTranslateConceptResults(fhirContext, matches));
}

return retVal;
}

private List<TranslateConceptResult> matchesToTranslateConceptResults(FhirContext fhirContext, List<IBase> theMatches) {
List<TranslateConceptResult> resultList = new ArrayList();
for (IBase m : theMatches) {
TranslateConceptResult match = new TranslateConceptResult();
String equivalence = ParametersUtil.getParameterPartValueAsString(fhirContext, m, "equivalence");
Optional<IBase> concept = ParametersUtil.getParameterPartValue(fhirContext, m, "concept");
String source = ParametersUtil.getParameterPartValueAsString(fhirContext, m, "source");

if (StringUtils.isNotBlank(equivalence)) {
match.setEquivalence(equivalence);
}

if (concept.isPresent()) {
IBaseCoding matchedCoding = (IBaseCoding) concept.get();
match.setSystem(matchedCoding.getSystem());
match.setCode(matchedCoding.getCode());
match.setDisplay(matchedCoding.getDisplay());

if (StringUtils.isNotBlank(source)) {
match.setConceptMapUrl(source);
}

resultList.add(match);
}
}
return resultList;
}

}

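Illustrative sketch (not part of the committed change): for reference, the input assembled by buildTranslateInputParameters is roughly equivalent to the following hand-built R4 Parameters resource. The parameter names mirror the code above; the sketch class name and the example.org URLs, version and code are placeholders.

import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.UriType;

public class TranslateParametersSketch {
	static Parameters exampleTranslateInput() {
		Parameters params = new Parameters();
		params.addParameter().setName("url").setValue(new UriType("http://example.org/ConceptMap/example"));
		params.addParameter("conceptMapVersion", "1.0");
		params.addParameter().setName("codeableConcept").setValue(
			new CodeableConcept().addCoding(new Coding("http://example.org/source-cs", "1234", null)));
		params.addParameter().setName("source").setValue(new UriType("http://example.org/ValueSet/source"));
		params.addParameter().setName("target").setValue(new UriType("http://example.org/ValueSet/target"));
		params.addParameter().setName("targetsystem").setValue(new UriType("http://example.org/target-cs"));
		params.addParameter("reverse", false);
		return params;
	}
}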
@@ -3,6 +3,8 @@ package org.hl7.fhir.common.hapi.validation.support;
import ca.uhn.fhir.context.FhirContext;
import ca.uhn.fhir.context.support.ConceptValidationOptions;
import ca.uhn.fhir.context.support.IValidationSupport;
import ca.uhn.fhir.context.support.TranslateConceptResult;
import ca.uhn.fhir.context.support.TranslateConceptResults;
import ca.uhn.fhir.jpa.model.util.JpaConstants;
import ca.uhn.fhir.parser.IJsonLikeParser;
import ca.uhn.fhir.rest.annotation.IdParam;

@@ -20,11 +22,14 @@ import ca.uhn.fhir.rest.server.IResourceProvider;
import ca.uhn.fhir.test.utilities.server.RestfulServerExtension;
import ca.uhn.fhir.util.ParametersUtil;
import com.google.common.collect.Lists;
import org.hl7.fhir.instance.model.api.IBaseCoding;
import org.hl7.fhir.instance.model.api.IBaseResource;
import org.hl7.fhir.r4.model.BooleanType;
import org.hl7.fhir.r4.model.CodeSystem;
import org.hl7.fhir.r4.model.CodeType;
import org.hl7.fhir.r4.model.CodeableConcept;
import org.hl7.fhir.r4.model.Coding;
import org.hl7.fhir.r4.model.ConceptMap;
import org.hl7.fhir.r4.model.IdType;
import org.hl7.fhir.r4.model.Parameters;
import org.hl7.fhir.r4.model.StringType;

@@ -58,7 +63,18 @@ public class RemoteTerminologyServiceValidationSupportTest {
private static final String CODE_SYSTEM_VERSION_AS_TEXT = "v2.1.12";
private static final String CODE = "CODE";
private static final String VALUE_SET_URL = "http://value.set/url";
private static final String TARGET_SYSTEM = "http://target.system/url";
private static final String CONCEPT_MAP_URL = "http://concept.map/url";
private static final String CONCEPT_MAP_VERSION = "2.1";
private static final String SOURCE_VALUE_SET_URL = "http://source.vs.system/url";
private static final String TARGET_VALUE_SET_URL = "http://target.vs.system/url";
private static final String TARGET_CODE = "CODE";
private static final String TARGET_CODE_DISPLAY = "code";
private static final boolean REVERSE = true;
private static final String EQUIVALENCE_CODE = "equivalent";

private static final String ERROR_MESSAGE = "This is an error message";
private static final String SUCCESS_MESSAGE = "This is a success message";

private static FhirContext ourCtx = FhirContext.forR4Cached();

@@ -68,6 +84,7 @@ public class RemoteTerminologyServiceValidationSupportTest {
private MyValueSetProvider myValueSetProvider;
private RemoteTerminologyServiceValidationSupport mySvc;
private MyCodeSystemProvider myCodeSystemProvider;
private MyConceptMapProvider myConceptMapProvider;

@BeforeEach
public void before() {

@@ -77,6 +94,9 @@ public class RemoteTerminologyServiceValidationSupportTest {
myCodeSystemProvider = new MyCodeSystemProvider();
myRestfulServerExtension.getRestfulServer().registerProvider(myCodeSystemProvider);

myConceptMapProvider = new MyConceptMapProvider();
myRestfulServerExtension.getRestfulServer().registerProvider(myConceptMapProvider);

String baseUrl = "http://localhost:" + myRestfulServerExtension.getPort();

mySvc = new RemoteTerminologyServiceValidationSupport(ourCtx);

@@ -277,6 +297,91 @@ public class RemoteTerminologyServiceValidationSupportTest {
assertEquals(null, outcome);
}

@Test
public void testTranslateCode_AllInParams_AllOutParams() {
myConceptMapProvider.myNextReturnParams = new Parameters();
myConceptMapProvider.myNextReturnParams.addParameter("result", true);
myConceptMapProvider.myNextReturnParams.addParameter("message", ERROR_MESSAGE);

TranslateConceptResults expectedResults = new TranslateConceptResults();
expectedResults.setResult(true);

// Add 2 matches
addMatchToTranslateRequest(myConceptMapProvider.myNextReturnParams);
addMatchToTranslateRequest(myConceptMapProvider.myNextReturnParams);

List<TranslateConceptResult> translateResults = new ArrayList<>();
TranslateConceptResult singleResult = new TranslateConceptResult();
singleResult
.setEquivalence(EQUIVALENCE_CODE)
.setSystem(TARGET_SYSTEM)
.setCode(TARGET_CODE)
.setConceptMapUrl(CONCEPT_MAP_URL)
.setDisplay(TARGET_CODE_DISPLAY);
translateResults.add(singleResult);
translateResults.add(singleResult);
expectedResults.setResults(translateResults);

CodeableConcept codeableConcept = new CodeableConcept();
codeableConcept.addCoding(new Coding(CODE_SYSTEM, CODE, null));

IValidationSupport.TranslateCodeRequest request = new IValidationSupport.TranslateCodeRequest(
Collections.unmodifiableList(codeableConcept.getCoding()),
TARGET_SYSTEM,
CONCEPT_MAP_URL,
CONCEPT_MAP_VERSION,
SOURCE_VALUE_SET_URL,
TARGET_VALUE_SET_URL,
null,
REVERSE);

TranslateConceptResults results = mySvc.translateConcept(request);

assertEquals(results.getResult(), true);
assertEquals(results.getResults().size(), 2);
for(TranslateConceptResult result : results.getResults()) {
assertEquals(singleResult, result);
}

assertTrue(codeableConcept.equalsDeep(myConceptMapProvider.myLastCodeableConcept));
assertEquals(TARGET_SYSTEM, myConceptMapProvider.myLastTargetCodeSystem.getValue());
assertEquals(CONCEPT_MAP_URL, myConceptMapProvider.myLastConceptMapUrl.getValue());
assertEquals(CONCEPT_MAP_VERSION, myConceptMapProvider.myLastConceptMapVersion.getValue());
assertEquals(SOURCE_VALUE_SET_URL, myConceptMapProvider.myLastSourceValueSet.getValue());
assertEquals(TARGET_VALUE_SET_URL, myConceptMapProvider.myLastTargetValueSet.getValue());
assertEquals(REVERSE, myConceptMapProvider.myLastReverse.getValue());
}

@Test
public void testTranslateCode_NoInParams_NoOutParams() {
myConceptMapProvider.myNextReturnParams = new Parameters();

List<IBaseCoding> codings = new ArrayList<>();
codings.add(new Coding(null, null, null));
IValidationSupport.TranslateCodeRequest request = new IValidationSupport.TranslateCodeRequest(codings, null);

TranslateConceptResults results = mySvc.translateConcept(request);

assertEquals(results.getResult(), false);
assertEquals(results.getResults().size(), 0);

assertNull(myConceptMapProvider.myLastCodeableConcept);
assertNull(myConceptMapProvider.myLastTargetCodeSystem);
assertNull(myConceptMapProvider.myLastConceptMapUrl);
assertNull(myConceptMapProvider.myLastConceptMapVersion);
assertNull(myConceptMapProvider.myLastSourceValueSet);
assertNull(myConceptMapProvider.myLastTargetValueSet);
assertNull(myConceptMapProvider.myLastReverse);
}

private void addMatchToTranslateRequest(Parameters params) {
Parameters.ParametersParameterComponent matchParam = params.addParameter().setName("match");
matchParam.addPart().setName("equivalence").setValue(new CodeType(EQUIVALENCE_CODE));
Coding value = new Coding(TARGET_SYSTEM, TARGET_CODE, TARGET_CODE_DISPLAY);
matchParam.addPart().setName("concept").setValue(value);
matchParam.addPart().setName("source").setValue(new UriType(CONCEPT_MAP_URL));
}

/**
* Remote terminology services can be used to validate codes when code system is present,
* even when inferSystem is true

@@ -636,5 +741,50 @@ public class RemoteTerminologyServiceValidationSupportTest {

}

private static class MyConceptMapProvider implements IResourceProvider {
private UriType myLastConceptMapUrl;
private StringType myLastConceptMapVersion;
private CodeableConcept myLastCodeableConcept;
private UriType myLastSourceValueSet;
private UriType myLastTargetValueSet;
private UriType myLastTargetCodeSystem;
private BooleanType myLastReverse;

private int myInvocationCount;
private Parameters myNextReturnParams;

@Operation(name = JpaConstants.OPERATION_TRANSLATE, idempotent = true, returnParameters = {
@OperationParam(name = "result", type = BooleanType.class, min = 1, max = 1),
@OperationParam(name = "message", type = StringType.class, min = 0, max = 1),
})
public Parameters translate(
HttpServletRequest theServletRequest,
@IdParam(optional = true) IdType theId,
@OperationParam(name = "url", min = 0, max = 1) UriType theConceptMapUrl,
@OperationParam(name = "conceptMapVersion", min = 0, max = 1) StringType theConceptMapVersion,
@OperationParam(name = "codeableConcept", min = 0, max = 1) CodeableConcept theSourceCodeableConcept,
@OperationParam(name = "source", min = 0, max = 1) UriType theSourceValueSet,
@OperationParam(name = "target", min = 0, max = 1) UriType theTargetValueSet,
@OperationParam(name = "targetsystem", min = 0, max = 1) UriType theTargetCodeSystem,
@OperationParam(name = "reverse", min = 0, max = 1) BooleanType theReverse,
RequestDetails theRequestDetails
) {
myInvocationCount++;
myLastConceptMapUrl = theConceptMapUrl;
myLastConceptMapVersion = theConceptMapVersion;
myLastCodeableConcept = theSourceCodeableConcept;
myLastSourceValueSet = theSourceValueSet;
myLastTargetValueSet = theTargetValueSet;
myLastTargetCodeSystem = theTargetCodeSystem;
myLastReverse = theReverse;
return myNextReturnParams;
}

@Override
public Class<? extends IBaseResource> getResourceType() {
return ConceptMap.class;
}

}

}